diff --git a/.bandit b/.bandit index 3afe77f3bb..d89efffe79 100644 --- a/.bandit +++ b/.bandit @@ -1 +1,3 @@ -skips: ["B506"] +[bandit] +skips: B506 +exclude: satpy/tests diff --git a/.git_archival.txt b/.git_archival.txt index 082d6c2563..95cb3eea4e 100644 --- a/.git_archival.txt +++ b/.git_archival.txt @@ -1 +1 @@ -ref-names: $Format:%D$ \ No newline at end of file +ref-names: $Format:%D$ diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 5d248f05e3..644d3aa6ae 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -7,6 +7,9 @@ concurrency: on: [push, pull_request] +env: + CACHE_NUMBER: 0 + jobs: lint: name: lint and style checks @@ -29,37 +32,6 @@ jobs: run: | flake8 satpy/ - website: - name: build website - runs-on: ubuntu-latest - steps: - - name: Checkout source - uses: actions/checkout@v2 - with: - fetch-depth: 0 - - - name: Setup Conda Environment - uses: conda-incubator/setup-miniconda@v2 - with: - miniforge-variant: Mambaforge - miniforge-version: latest - use-mamba: true - python-version: "3.8" - environment-file: continuous_integration/environment.yaml - activate-environment: test-environment - - - name: Install Satpy - shell: bash -l {0} - run: | - pip install sphinx sphinx_rtd_theme sphinxcontrib-apidoc; \ - pip install --no-deps -e . - - - name: Run Sphinx Build - shell: bash -l {0} - run: | - cd doc; \ - make html SPHINXOPTS="-W" - test: runs-on: ${{ matrix.os }} continue-on-error: ${{ matrix.experimental }} @@ -68,10 +40,10 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.7", "3.8", "3.9"] + python-version: ["3.8", "3.9", "3.10"] experimental: [false] include: - - python-version: "3.8" + - python-version: "3.9" os: "ubuntu-latest" experimental: true @@ -92,9 +64,25 @@ jobs: miniforge-version: latest use-mamba: true python-version: ${{ matrix.python-version }} - environment-file: continuous_integration/environment.yaml activate-environment: test-environment + - name: Set cache environment variables + shell: bash -l {0} + run: | + echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV + CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)") + echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV + + - uses: actions/cache@v3 + with: + path: ${{ env.CONDA_PREFIX }} + key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }} + id: cache + + - name: Update environment + run: mamba env update -n test-environment -f continuous_integration/environment.yaml + if: steps.cache.outputs.cache-hit != 'true' + - name: Install unstable dependencies if: matrix.experimental == true shell: bash -l {0} @@ -148,7 +136,7 @@ jobs: coverage xml - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v1 + uses: codecov/codecov-action@v2 with: flags: behaviourtests file: ./coverage.xml @@ -162,4 +150,3 @@ jobs: uses: AndreMiras/coveralls-python-action@develop with: parallel-finished: true - diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index c85a80b889..9470413b6b 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -22,4 +22,4 @@ jobs: uses: pypa/gh-action-pypi-publish@v1.4.1 with: user: __token__ - password: ${{ secrets.pypi_password }} \ No newline at end of file + password: ${{ secrets.pypi_password }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 
7614e50c69..30e81afcae 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,19 +1,26 @@ exclude: '^$' fail_fast: false repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v2.2.3 + - repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 hooks: - id: flake8 additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe] args: [--max-complexity, "10"] + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.2.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.0' # Update me! + rev: '1.7.4' # Update me! hooks: - id: bandit - args: [-x, satpy/tests, -c, .bandit] + args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v0.910-1' # Use the sha / tag you want to point at + rev: 'v0.950' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index f48e5beb6b..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,90 +0,0 @@ -language: python -env: - global: - # Set defaults to avoid repeating in most cases - - PYTHON_VERSION=$TRAVIS_PYTHON_VERSION - - NUMPY_VERSION=stable - - MAIN_CMD='python setup.py' - - CONDA_DEPENDENCIES='xarray dask distributed toolz Cython sphinx cartopy pillow matplotlib scipy pyyaml - pyproj pyresample coveralls coverage codecov behave netcdf4 h5py h5netcdf gdal rasterio imageio pyhdf - mock libtiff geoviews zarr python-eccodes geoviews pytest pytest-cov fsspec pylibtiff' - - PIP_DEPENDENCIES='trollsift trollimage pyspectral pyorbital' - - SETUP_XVFB=False - - EVENT_TYPE='push pull_request' - - SETUP_CMD='test' - - CONDA_CHANNELS='conda-forge' - - CONDA_CHANNEL_PRIORITY='strict' - - MAMBA=True - - UNSTABLE_DEPS=False -matrix: - include: - - env: PYTHON_VERSION=3.8 - os: linux - - env: PYTHON_VERSION=3.8 - os: osx - language: generic - - env: PYTHON_VERSION=3.8 - os: windows - language: bash - - env: PYTHON_VERSION=3.7 - os: linux - - env: PYTHON_VERSION=3.7 - os: osx - language: generic - # allowed to fail: - - os: linux - env: - - PYTHON_VERSION=3.8 - - UNSTABLE_DEPS=True - - allow_failures: - - os: linux - env: - - PYTHON_VERSION=3.8 - - UNSTABLE_DEPS=True -install: - - git clone --depth 1 git://github.com/astropy/ci-helpers.git - - source ci-helpers/travis/setup_conda.sh - # See https://github.com/travis-ci/travis-ci/issues/8920 - - if [ $TRAVIS_OS_NAME != "windows" ]; then - python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)"; - fi - - if [ "$UNSTABLE_DEPS" == "True" ]; then - python -m pip install - -f https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com - --no-deps --pre --upgrade - matplotlib - numpy - pandas - scipy; - python -m pip install - --no-deps --upgrade - git+https://github.com/dask/dask - git+https://github.com/dask/distributed - git+https://github.com/zarr-developers/zarr - git+https://github.com/Unidata/cftime - git+https://github.com/mapbox/rasterio - git+https://github.com/pydata/bottleneck - git+https://github.com/pydata/xarray; - fi - - pip install --no-deps -e . 
-script: -- pytest --cov=satpy satpy/tests -- coverage run -a --source=satpy -m behave satpy/tests/features --tags=-download -- if [ "$TRAVIS_EVENT_TYPE" == "cron" ]; then coverage run -a --source=satpy -m behave satpy/tests/features; fi -#after_success: -#- if [[ $PYTHON_VERSION == 3.8 ]]; then coveralls; codecov; fi -#deploy: -# - provider: pypi -# user: dhoese -# password: -# secure: frK+0k1STeTM7SizRseP0qdTfOVz9ZMIra+3qEytPdxCLceXAH8LxPU16zj5rdNQxasF1hZ6rAd952fly+ypw2TEf5r2WnStrt7G5QlyE7VB6XGSDpIUxKF1FYccLvYs0/R6Y35MTEPqdM51PM5yEBjoY5b4tA3RF3fDq11cqc/SiWr6DgSLB1WJZULOdtCzBbfGbm5LyJ7yeNbISASSAwVvZTGWw7kJDgi0W5zxwEX82N5tBGbfKIu59qmxyj8FxmcrUwKZ4P3rQNg1kN1utzAB+PSf3GAVvbZfWJQuAKwMqpZgaV9lX0V7eUd/AxPobzEk9WyoNBMIdrSPej5BKWTDiYvaeRTOsggoUCSQJJA/SITEvkJgLWXoKKX2OWrM8RBUO4MoZJpPGXN42PRtMJkV2sx6ZigkpJlHdn39SsIRZX31zsfv8bBhclb70bt1Ts0fDd0rVdZAI6gMI+sgUePwEUn+XbWrvI0sMfDX3QsXDMV393RHgaIPxd+lRqUlYsNOxjsWpsbsvX55ePLxYHsNrv11KKyL/iGjGotVeVUO5D78qvfd4JrsUnMalQyZfW8NTEKa5Ebcs7gYJTwYEOTCQU12BkHOv1zFkjZG5RdGwkEvG3pykLhx+qDyYEd7pKB3TvhzLPqZPSrPxirwcoc0UzCc6ocYdzpqVuViFuk= -# distributions: sdist -# skip_existing: true -# on: -# tags: true -# repo: pytroll/satpy -#notifications: -# slack: -# rooms: -# - pytroll:96mNSYSI1dBjGyzVXkBT6qFt#github diff --git a/AUTHORS.md b/AUTHORS.md index 9383fceda3..dd2b24750d 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -52,6 +52,7 @@ The following people have made contributions to this project: - [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl) - [Tom Parker (tparker-usgs)](https://github.com/tparker-usgs) - [Christian Peters (peters77)](https://github.com/peters77) +- [Pepe Phillips (pepephillips)](https://github.com/pepephillips) - [Ghislain Picard (ghislainp)](https://github.com/ghislainp) - [Simon R. 
Proud (simonrp84)](https://github.com/simonrp84) - [Lars Ørum Rasmussen (loerum)](https://github.com/loerum) @@ -76,4 +77,4 @@ The following people have made contributions to this project: - [oananicola (oananicola)](https://github.com/oananicola) - [praerien (praerien)](https://github.com/praerien) - [Xin Zhang (zxdawn)](https://github.com/zxdawn) -- [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) \ No newline at end of file +- [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) diff --git a/CHANGELOG.md b/CHANGELOG.md index 890ce5154b..d8a5b720a2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,282 @@ +## Version 0.36.0 (2022/04/14) + +### Issues Closed + +* [Issue 2082](https://github.com/pytroll/satpy/issues/2082) - Some composite are produced with the wrong colors +* [Issue 2073](https://github.com/pytroll/satpy/issues/2073) - Creating scene with SEVIRI HRIT reader fails with UnicodeDecodeError ([PR 2077](https://github.com/pytroll/satpy/pull/2077) by [@pdebuyl](https://github.com/pdebuyl)) +* [Issue 2066](https://github.com/pytroll/satpy/issues/2066) - RGBs should never have units, but some do ([PR 2068](https://github.com/pytroll/satpy/pull/2068) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2062](https://github.com/pytroll/satpy/issues/2062) - Can make Trollimage colorbar in scene +* [Issue 1975](https://github.com/pytroll/satpy/issues/1975) - bunzip regular seviri hrit segments ([PR 2060](https://github.com/pytroll/satpy/pull/2060) by [@pdebuyl](https://github.com/pdebuyl)) +* [Issue 1954](https://github.com/pytroll/satpy/issues/1954) - Cloud Phase/Type/Phase Distinction RGBs for VIIRS and FCI ([PR 1957](https://github.com/pytroll/satpy/pull/1957) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 1702](https://github.com/pytroll/satpy/issues/1702) - Resampling not working with SLSTR ancillary datasets + +In this release 7 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2084](https://github.com/pytroll/satpy/pull/2084) - Fix CREFL using incorrect coefficients for MODIS +* [PR 2083](https://github.com/pytroll/satpy/pull/2083) - Fix VIIRS L1B reader sensor not matching VIIRS SDR reader +* [PR 2080](https://github.com/pytroll/satpy/pull/2080) - Ignore alpha when adding luminance in Sandwich compositor +* [PR 2079](https://github.com/pytroll/satpy/pull/2079) - Remove marine_clean_aerosol from default AHI rayleigh_corrected modifier +* [PR 2077](https://github.com/pytroll/satpy/pull/2077) - Fix missing 'rb' mode for opening files ([2073](https://github.com/pytroll/satpy/issues/2073)) +* [PR 2070](https://github.com/pytroll/satpy/pull/2070) - Fix lru_cache memory leaks and other linting errors +* [PR 2048](https://github.com/pytroll/satpy/pull/2048) - Fix CMIC CRE product in nwcsaf yaml reader +* [PR 2016](https://github.com/pytroll/satpy/pull/2016) - Fix the sensor name for msu_gsa_l1b reader +* [PR 1410](https://github.com/pytroll/satpy/pull/1410) - Fix osisaf SST reader + +#### Features added + +* [PR 2086](https://github.com/pytroll/satpy/pull/2086) - Update FCI reader for new test data release and add patches for IDPF data +* [PR 2078](https://github.com/pytroll/satpy/pull/2078) - Add possibility to define the dataset rectification longitude in seviri_l2_bufr reader +* [PR 2076](https://github.com/pytroll/satpy/pull/2076) - Support reading FSFiles in SEVIRI HRIT reader. +* [PR 2068](https://github.com/pytroll/satpy/pull/2068) - Make sure RGBs do not have units attributes. ([2066](https://github.com/pytroll/satpy/issues/2066)) +* [PR 2065](https://github.com/pytroll/satpy/pull/2065) - Add filename to YAML for NASA NRT VIIRS files with creation date/time. +* [PR 2060](https://github.com/pytroll/satpy/pull/2060) - Allow reading Bz2 hrit segments ([1975](https://github.com/pytroll/satpy/issues/1975)) +* [PR 2057](https://github.com/pytroll/satpy/pull/2057) - Add option to replace saturated MODIS L1b values with max valid value +* [PR 1980](https://github.com/pytroll/satpy/pull/1980) - Adapt AAPP reader for generic chunk size +* [PR 1957](https://github.com/pytroll/satpy/pull/1957) - Add RGBs for cloud phase (distinction) and type ([1954](https://github.com/pytroll/satpy/issues/1954)) +* [PR 1410](https://github.com/pytroll/satpy/pull/1410) - Fix osisaf SST reader + +#### Documentation changes + +* [PR 2075](https://github.com/pytroll/satpy/pull/2075) - Add documentation on how the colorize enhancement can be used +* [PR 2071](https://github.com/pytroll/satpy/pull/2071) - Add example to the documentation using multiple readers + +#### Refactoring + +* [PR 2087](https://github.com/pytroll/satpy/pull/2087) - Refactor HRIT/LRIT format reader. + +In this release 22 pull requests were closed. 
+ + +## Version 0.35.0 (2022/03/16) + +### Issues Closed + +* [Issue 2063](https://github.com/pytroll/satpy/issues/2063) - Unable to commit changes due to bandit (use of subprocess module) +* [Issue 2037](https://github.com/pytroll/satpy/issues/2037) - Why the lon/lat is interpolated to 1km while data are still 5km for MOD06 product +* [Issue 2012](https://github.com/pytroll/satpy/issues/2012) - Define time metadata options and usage ([PR 2031](https://github.com/pytroll/satpy/pull/2031) by [@djhoese](https://github.com/djhoese)) +* [Issue 1973](https://github.com/pytroll/satpy/issues/1973) - Using cached geolocation and angles results in an error if chunk size not appropriately set. ([PR 2041](https://github.com/pytroll/satpy/pull/2041) by [@djhoese](https://github.com/djhoese)) +* [Issue 1842](https://github.com/pytroll/satpy/issues/1842) - Update needed for vii_l1b_nc reader to match a change to the Test Data and processor ([PR 1979](https://github.com/pytroll/satpy/pull/1979) by [@pepephillips](https://github.com/pepephillips)) +* [Issue 1110](https://github.com/pytroll/satpy/issues/1110) - NWCSAF reader does not support GOES or HIMAWARI +* [Issue 1022](https://github.com/pytroll/satpy/issues/1022) - Factorize area def computation in goes_imager_hrit ([PR 1934](https://github.com/pytroll/satpy/pull/1934) by [@sfinkens](https://github.com/sfinkens)) +* [Issue 956](https://github.com/pytroll/satpy/issues/956) - UnboundLocalError when passing "empty" generator as filenames +* [Issue 723](https://github.com/pytroll/satpy/issues/723) - Passing multiple readers fails if `generic_image` is among them +* [Issue 684](https://github.com/pytroll/satpy/issues/684) - Gracefully handle pykdtree's use of OpenMP (OMP_NUM_THREADS) with dask + +In this release 10 issues were closed. 
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2054](https://github.com/pytroll/satpy/pull/2054) - Fix DifferenceCompositor not using metadata from YAML +* [PR 2049](https://github.com/pytroll/satpy/pull/2049) - Fix dataset attribute typo and reduce amount of categorical dataset filtering in fci_l2_nc reader +* [PR 2044](https://github.com/pytroll/satpy/pull/2044) - Fix unit handling in ERF DNB normalization's saturation correction +* [PR 2041](https://github.com/pytroll/satpy/pull/2041) - Fix angle generation caching not working with irregular chunks ([1973](https://github.com/pytroll/satpy/issues/1973)) +* [PR 2032](https://github.com/pytroll/satpy/pull/2032) - Fix various metadata bugs in 'awips_tiled' writer ([417](https://github.com/ssec/polar2grid/issues/417)) +* [PR 1933](https://github.com/pytroll/satpy/pull/1933) - Change tested Python versions to 3.8, 3.9 and 3.10 + +#### Features added + +* [PR 2056](https://github.com/pytroll/satpy/pull/2056) - Update SLSTR calibration coefficients +* [PR 2055](https://github.com/pytroll/satpy/pull/2055) - Skip dataset flipping in GEOFlippableFileYAMLReader in case of SwathDefinition data +* [PR 2047](https://github.com/pytroll/satpy/pull/2047) - Add missing GOES-18 support to glm_l2 reader +* [PR 2034](https://github.com/pytroll/satpy/pull/2034) - Update angle generation to prefer "actual" satellite position +* [PR 2033](https://github.com/pytroll/satpy/pull/2033) - Remove use of legacy satellite position attributes +* [PR 2031](https://github.com/pytroll/satpy/pull/2031) - Update AHI HSD reader with observation/scheduled times and nominal satellite position ([2012](https://github.com/pytroll/satpy/issues/2012)) +* [PR 2030](https://github.com/pytroll/satpy/pull/2030) - Add 'preference' option to 'get_satpos' utility +* [PR 2028](https://github.com/pytroll/satpy/pull/2028) - Add 'colormap_tag' keyword argument to geotiff writer +* [PR 1993](https://github.com/pytroll/satpy/pull/1993) - Add 'l2_flags' quality filtering to 'seadas_l2' reader +* [PR 1979](https://github.com/pytroll/satpy/pull/1979) - Update VII reader for test data v2 ([1842](https://github.com/pytroll/satpy/issues/1842)) +* [PR 1933](https://github.com/pytroll/satpy/pull/1933) - Change tested Python versions to 3.8, 3.9 and 3.10 +* [PR 1927](https://github.com/pytroll/satpy/pull/1927) - Add support for more FCI L2 products and datasets + +#### Refactoring + +* [PR 2040](https://github.com/pytroll/satpy/pull/2040) - Refactor composite generation to avoid unneeded warnings +* [PR 1934](https://github.com/pytroll/satpy/pull/1934) - Factorize area computation in goes_imager_hrit ([1022](https://github.com/pytroll/satpy/issues/1022)) + +In this release 20 pull requests were closed. 
+ + +## Version 0.34.0 (2022/02/18) + +### Issues Closed + +* [Issue 2026](https://github.com/pytroll/satpy/issues/2026) - Missing units in avhrr_l1b_eps reader ([PR 2027](https://github.com/pytroll/satpy/pull/2027) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2024](https://github.com/pytroll/satpy/issues/2024) - Allow to skip unit conversion in ninjotiff writer ([PR 2025](https://github.com/pytroll/satpy/pull/2025) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2023](https://github.com/pytroll/satpy/issues/2023) - Allow to keep units in composite +* [Issue 2022](https://github.com/pytroll/satpy/issues/2022) - save_dataset changes dataset in-place +* [Issue 2018](https://github.com/pytroll/satpy/issues/2018) - Wrong AxisIntercept (add_offset) when writing °C temperature units with ninjogeotiff writer +* [Issue 2014](https://github.com/pytroll/satpy/issues/2014) - Problem in converting VIIRS hdf to geotif +* [Issue 2010](https://github.com/pytroll/satpy/issues/2010) - AHI HSD true_color incorrect with cache_sensor_angles ([PR 2013](https://github.com/pytroll/satpy/pull/2013) by [@djhoese](https://github.com/djhoese)) +* [Issue 2008](https://github.com/pytroll/satpy/issues/2008) - abi_l1b reader leaks memory in Python-3.7 ([PR 2011](https://github.com/pytroll/satpy/pull/2011) by [@sfinkens](https://github.com/sfinkens)) +* [Issue 2004](https://github.com/pytroll/satpy/issues/2004) - Configure image type returned by MaskingCompositor ([PR 2005](https://github.com/pytroll/satpy/pull/2005) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2001](https://github.com/pytroll/satpy/issues/2001) - Failed to load AVHRR LAC data +* [Issue 1999](https://github.com/pytroll/satpy/issues/1999) - Reader for Арктика-М (Arktika-M) МСУ-ГС (MSU-GS) data ([PR 2000](https://github.com/pytroll/satpy/pull/2000) by [@simonrp84](https://github.com/simonrp84)) +* [Issue 1998](https://github.com/pytroll/satpy/issues/1998) - Add reader for Arctica M N-1 hdf5 data +* [Issue 1995](https://github.com/pytroll/satpy/issues/1995) - AttributeError when cropping data for VIIRS +* [Issue 1959](https://github.com/pytroll/satpy/issues/1959) - Unittest failure in test_modifiers.py +* [Issue 1948](https://github.com/pytroll/satpy/issues/1948) - Contribute to Satpy +* [Issue 1945](https://github.com/pytroll/satpy/issues/1945) - Wrong dtype of `uint32` array saved by the cf_writer +* [Issue 1943](https://github.com/pytroll/satpy/issues/1943) - sza_check from trollflow2 fails with KeyError: 'start_time' +* [Issue 1883](https://github.com/pytroll/satpy/issues/1883) - Test failure on i386 and armhf ([PR 1966](https://github.com/pytroll/satpy/pull/1966) by [@djhoese](https://github.com/djhoese)) +* [Issue 1384](https://github.com/pytroll/satpy/issues/1384) - AHI HRIT reader has gotten slower ([PR 1986](https://github.com/pytroll/satpy/pull/1986) by [@pnuu](https://github.com/pnuu)) +* [Issue 1099](https://github.com/pytroll/satpy/issues/1099) - `find_files_and_readers` read unneeded files + +In this release 20 issues were closed. 
+ + ### Pull Requests Merged + +#### Bugs fixed + +* [PR 2027](https://github.com/pytroll/satpy/pull/2027) - Include units with AVHRR EPS metadata ([2026](https://github.com/pytroll/satpy/issues/2026)) +* [PR 2017](https://github.com/pytroll/satpy/pull/2017) - Fix ABI rayleigh_corrected_crefl modifier using deprecated DEM specifier +* [PR 2015](https://github.com/pytroll/satpy/pull/2015) - Fix various dask array bugs in CREFL modifier +* [PR 2013](https://github.com/pytroll/satpy/pull/2013) - Fix angle generation caching occasionally swapping results ([2010](https://github.com/pytroll/satpy/issues/2010)) +* [PR 2011](https://github.com/pytroll/satpy/pull/2011) - Fix memory leak in cached_property backport ([2008](https://github.com/pytroll/satpy/issues/2008), [2008](https://github.com/pytroll/satpy/issues/2008)) +* [PR 2006](https://github.com/pytroll/satpy/pull/2006) - Fix Scene not being serializable +* [PR 2002](https://github.com/pytroll/satpy/pull/2002) - Update tests to be more flexible to CRS and enhancement changes +* [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page +* [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier +* [PR 1987](https://github.com/pytroll/satpy/pull/1987) - Check that time is not already a coordinate in CF writer +* [PR 1983](https://github.com/pytroll/satpy/pull/1983) - More general filename filter for ascat soil moisture, allowing for Metop-B and Metop-C +* [PR 1982](https://github.com/pytroll/satpy/pull/1982) - Fix ninjotiff writer from erroneous K to C conversion + +#### Features added + +* [PR 2025](https://github.com/pytroll/satpy/pull/2025) - Allow skipping unit conversion in NinJoTIFF ([2024](https://github.com/pytroll/satpy/issues/2024)) +* [PR 2007](https://github.com/pytroll/satpy/pull/2007) - Update abi_l2_nc to include filename metadata similar to abi_l1b +* [PR 2005](https://github.com/pytroll/satpy/pull/2005) - Add flag to MaskingCompositor to return RGBA for single-band input ([2004](https://github.com/pytroll/satpy/issues/2004)) +* [PR 2000](https://github.com/pytroll/satpy/pull/2000) - Add a reader for the MSU-GS/A + Arctica-M1 data ([1999](https://github.com/pytroll/satpy/issues/1999)) +* [PR 1992](https://github.com/pytroll/satpy/pull/1992) - Add support for CMIC product from PPSv2021 +* [PR 1989](https://github.com/pytroll/satpy/pull/1989) - read the "elevation" variable in slstr_l1b +* [PR 1986](https://github.com/pytroll/satpy/pull/1986) - Add reader kwarg to 'ahi_hrit' to disable exact start_time ([1384](https://github.com/pytroll/satpy/issues/1384)) +* [PR 1967](https://github.com/pytroll/satpy/pull/1967) - Add ability to read comma-separated colormaps during enhancement +* [PR 1966](https://github.com/pytroll/satpy/pull/1966) - Reduce MODIS L1b/L2 test case size for better test performance ([1883](https://github.com/pytroll/satpy/issues/1883)) +* [PR 1962](https://github.com/pytroll/satpy/pull/1962) - Use a dependency matrix for benchmarking + +#### Documentation changes + +* [PR 2020](https://github.com/pytroll/satpy/pull/2020) - Clarify documentation regarding attributes used in get_angles +* [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup
page +* [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier +* [PR 1969](https://github.com/pytroll/satpy/pull/1969) - Improve modifier documentation +* [PR 1968](https://github.com/pytroll/satpy/pull/1968) - Improve API documentation in CompositeBase +* [PR 1961](https://github.com/pytroll/satpy/pull/1961) - Update documentation to refer to all EO satellite data +* [PR 1960](https://github.com/pytroll/satpy/pull/1960) - Add release notes and security policy to documentation +* [PR 1950](https://github.com/pytroll/satpy/pull/1950) - Fix formatting in configuration documentation + +In this release 30 pull requests were closed. + + +## Version 0.33.1 (2021/12/17) + +### Issues Closed + +* [Issue 1937](https://github.com/pytroll/satpy/issues/1937) - Add SECURITY.md +* [Issue 1932](https://github.com/pytroll/satpy/issues/1932) - warnings of `invalid value encountered in true_divide` and `invalid value encountered in double_scalars` in +* [Issue 1903](https://github.com/pytroll/satpy/issues/1903) - MPEF Product Header record definition , in seviri_base.py, needs to be updated +* [Issue 1799](https://github.com/pytroll/satpy/issues/1799) - Deprecate Scene.attrs property +* [Issue 1192](https://github.com/pytroll/satpy/issues/1192) - Harmonize SEVIRI area definitions + +In this release 5 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 1946](https://github.com/pytroll/satpy/pull/1946) - Fix angle generation not working for StackedAreaDefinitions +* [PR 1942](https://github.com/pytroll/satpy/pull/1942) - Fix dynamic_dnb composite converting NaNs to 0s +* [PR 1941](https://github.com/pytroll/satpy/pull/1941) - Fix SAFE SAR azimuth noise array construction +* [PR 1918](https://github.com/pytroll/satpy/pull/1918) - Fix geo interpolation for aapp data + +#### Features added + +* [PR 1674](https://github.com/pytroll/satpy/pull/1674) - Feature add support for AHI True Color Reproduction + +In this release 5 pull requests were closed. + + +## Version 0.33.0 (2021/12/10) + +### Issues Closed + +* [Issue 1930](https://github.com/pytroll/satpy/issues/1930) - ninjogeotiff writer produces file with ninjo_TransparentPixel=None ([PR 1931](https://github.com/pytroll/satpy/pull/1931) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 1902](https://github.com/pytroll/satpy/issues/1902) - High memory usage generating composites from ABI/AHI + +In this release 2 issues were closed. 
+ + ### Pull Requests Merged + +#### Bugs fixed + +* [PR 1931](https://github.com/pytroll/satpy/pull/1931) - When no fill value is used, write TransparentPixel=-1 in ninjogeotiff headers ([1930](https://github.com/pytroll/satpy/issues/1930)) +* [PR 1926](https://github.com/pytroll/satpy/pull/1926) - Update seadas_l2 chlor_a enhancement to use new log10 stretch +* [PR 1922](https://github.com/pytroll/satpy/pull/1922) - Fix ABI cloud_phase composite recipe and enhancement + +#### Features added + +* [PR 1917](https://github.com/pytroll/satpy/pull/1917) - Add support to read and visualize NOAA GOESR L2+ cloud mask products +* [PR 1912](https://github.com/pytroll/satpy/pull/1912) - Add Frequency range +* [PR 1908](https://github.com/pytroll/satpy/pull/1908) - Update AHI HSD calibration coefficients +* [PR 1905](https://github.com/pytroll/satpy/pull/1905) - Updated mpef product header to include new fields +* [PR 1882](https://github.com/pytroll/satpy/pull/1882) - Update GDAL_OPTIONS with driver= and COG-specific options +* [PR 1370](https://github.com/pytroll/satpy/pull/1370) - Add support for reading AAPP level-1c MHS/AMSU-B data + +#### Refactoring + +* [PR 1910](https://github.com/pytroll/satpy/pull/1910) - Refactor SZA and cos(SZA) generation to reduce duplicate computations + +In this release 10 pull requests were closed. + + +## Version 0.32.0 (2021/12/01) + +### Issues Closed + +* [Issue 1900](https://github.com/pytroll/satpy/issues/1900) - Load composites mixed from files or provided data ([PR 1901](https://github.com/pytroll/satpy/pull/1901) by [@djhoese](https://github.com/djhoese)) +* [Issue 1898](https://github.com/pytroll/satpy/issues/1898) - Loading composites without file handlers fails with KeyError ([PR 1899](https://github.com/pytroll/satpy/pull/1899) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 1893](https://github.com/pytroll/satpy/issues/1893) - Download and install Satpy for raspberry pi +* [Issue 1889](https://github.com/pytroll/satpy/issues/1889) - Question: How to release loaded data from memory? +* [Issue 1880](https://github.com/pytroll/satpy/issues/1880) - Add area definitions corresponding to geostationary imager fields of regard ([PR 1881](https://github.com/pytroll/satpy/pull/1881) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 1879](https://github.com/pytroll/satpy/issues/1879) - How to use histogram enhancement in yaml files? +* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese)) +* [Issue 1747](https://github.com/pytroll/satpy/issues/1747) - Load composites without file handlers. ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese)) +* [Issue 1456](https://github.com/pytroll/satpy/issues/1456) - Default cache directory should respect XDG Base Directory Specification. +* [Issue 583](https://github.com/pytroll/satpy/issues/583) - PPP_CONFIG_DIR set locally does not include the global dir for the eps_l1b reader + +In this release 10 issues were closed.
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 1899](https://github.com/pytroll/satpy/pull/1899) - Fix loading multi-sensor composites for manually added data ([1898](https://github.com/pytroll/satpy/issues/1898)) +* [PR 1891](https://github.com/pytroll/satpy/pull/1891) - Fix file handlers improperly matching some file types +* [PR 1884](https://github.com/pytroll/satpy/pull/1884) - Fix nucaps reader failing when given multiple input files + +#### Features added + +* [PR 1901](https://github.com/pytroll/satpy/pull/1901) - Update Scene.sensor_names to include sensors from readers and contained data ([1900](https://github.com/pytroll/satpy/issues/1900)) +* [PR 1897](https://github.com/pytroll/satpy/pull/1897) - Update AHI gridded reader to use HTTP instead of FTP +* [PR 1894](https://github.com/pytroll/satpy/pull/1894) - Add 'seadas_l2' reader for 'chlor_a' product +* [PR 1892](https://github.com/pytroll/satpy/pull/1892) - Add new pre-commit checks +* [PR 1888](https://github.com/pytroll/satpy/pull/1888) - Optimize composite YAML loading +* [PR 1885](https://github.com/pytroll/satpy/pull/1885) - Add optional on-disk zarr caching to sensor angle generation +* [PR 1881](https://github.com/pytroll/satpy/pull/1881) - Add area definitions for GOES ABI FOR ([1880](https://github.com/pytroll/satpy/issues/1880)) +* [PR 1797](https://github.com/pytroll/satpy/pull/1797) - Allow loading of composites after Scene resampling ([1752](https://github.com/pytroll/satpy/issues/1752), [1749](https://github.com/pytroll/satpy/issues/1749), [1747](https://github.com/pytroll/satpy/issues/1747)) + +#### Documentation changes + +* [PR 1873](https://github.com/pytroll/satpy/pull/1873) - Fix a typo in the ninjogeotiff documentation + +In this release 12 pull requests were closed. + + ## Version 0.31.0 (2021/11/03) ### Issues Closed diff --git a/MANIFEST.in b/MANIFEST.in index 948dcc64b9..7c8ea0e146 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -3,4 +3,4 @@ include doc/source/* include doc/examples/*.py include LICENSE.txt include README.rst -include satpy/version.py \ No newline at end of file +include satpy/version.py diff --git a/README.rst b/README.rst index c3c924af93..d971a872f5 100644 --- a/README.rst +++ b/README.rst @@ -9,7 +9,7 @@ Satpy .. image:: https://badge.fury.io/py/satpy.svg :target: https://badge.fury.io/py/satpy - + .. image:: https://anaconda.org/conda-forge/satpy/badges/version.svg :target: https://anaconda.org/conda-forge/satpy/ diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 0000000000..a743afa44c --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,24 @@ +# Security Policy + +## Supported Versions + +Satpy is currently pre-1.0 and includes a lot of changes in every release. As such we can't +guarantee that releases before 1.0 will see security updates except for the most recent +release. After 1.0, you can expect more stability in the interfaces and security fixes to be +backported more regularly. + +| Version | Supported | +| ------- | ------------------ | +| 0.x.x (latest) | :white_check_mark: | +| < 0.33.0 | :x: | + +## Unsafe YAML Loading + +Satpy allows for unsafe loading of YAML configuration files. Any YAML files +from untrusted sources should be sanitized of possibly malicious code. + +## Reporting a Vulnerability + +Do you think you've found a security vulnerability or issue in this project? 
Let us know by sending +an email to the maintainers at `pytroll-security@groups.io`. Please include as much information on +the issue as possible like code examples, documentation on the issue in other packages, etc. diff --git a/asv.conf.json b/asv.conf.json index 412614fb8d..dbecadf79a 100644 --- a/asv.conf.json +++ b/asv.conf.json @@ -22,8 +22,7 @@ // Customizable commands for building, installing, and // uninstalling the project. See asv.conf.json documentation. // - "install_command": ["in-dir={env_dir} python -mpip install {wheel_file} pyspectral pyorbital s3fs rasterio h5py netCDF4 pyhdf gcsfs shapely"], - // "install_command": ["in-dir={env_dir} conda install {wheel_file} s3fs rasterio"], + //"install_command": ["in-dir={env_dir} python -mpip install {wheel_file} s3fs rasterio h5py netCDF4 pyhdf gcsfs shapely"], // "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"], // "build_command": [ // "python setup.py build", @@ -46,7 +45,8 @@ // If missing or the empty string, the tool will be automatically // determined by looking for tools on the PATH environment // variable. - "environment_type": "virtualenv", + //"environment_type": "virtualenv", + "environment_type": "conda", // timeout in seconds for installing any dependencies in environment // defaults to 10 min @@ -58,10 +58,11 @@ // The Pythons you'd like to test against. If not provided, defaults // to the current version of Python used to run `asv`. // "pythons": ["2.7", "3.6"], + "pythons": ["3.9", "3.10"], // The list of conda channel names to be searched for benchmark // dependency packages in the specified order - // "conda_channels": ["conda-forge", "defaults"], + "conda_channels": ["conda-forge"], // The matrix of dependencies to test. Each key is the name of a // package (in PyPI) and the values are version numbers. An empty @@ -78,6 +79,23 @@ // "six": ["", null], // test with and without six installed // "pip+emcee": [""], // emcee is only available for install with pip. // }, + "matrix": { + "pyresample": ["1.22.3"], + "trollimage": ["1.17.0"], + "pyorbital": ["1.7.1"], + "pyspectral": ["0.10.6"], + "rasterio": ["1.2.10"], + "dask": ["2021.12.0"], + "xarray": ["0.20.2"], + "numpy": ["1.22.0"], + "s3fs": [], + "h5py": [], + "netCDF4": [], + "pyhdf": [], + "gcsfs": [], + "shapely": [], + "trollsift": [] + }, // Combinations of libraries/python versions can be excluded/included // from the set to test. Each entry is a dictionary containing additional diff --git a/changelog_pre0.9.0.rst b/changelog_pre0.9.0.rst index e4bbc5c021..90f9c65995 100644 --- a/changelog_pre0.9.0.rst +++ b/changelog_pre0.9.0.rst @@ -4374,6 +4374,3 @@ Other - Modified image inversion unit test to reflect new behaviour. [Martin Raspaud] - New rebase. [Martin Raspaud] - - - diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 10b8cdb415..ad768209b3 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -11,6 +11,7 @@ dependencies: - Cython - sphinx - cartopy + - panel>=0.12.7 - pillow - matplotlib - scipy @@ -28,6 +29,7 @@ dependencies: - rasterio - bottleneck - rioxarray + - defusedxml - imageio - pyhdf - mock @@ -42,6 +44,7 @@ dependencies: - pytest-cov - pytest-lazy-fixture - fsspec + - s3fs - pylibtiff - python-geotiepoints - pooch diff --git a/doc/Makefile b/doc/Makefile index dd35fc4fdd..624fe21234 100644 --- a/doc/Makefile +++ b/doc/Makefile @@ -2,7 +2,7 @@ # # You can set these variables from the command line. 
-SPHINXOPTS = +SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build diff --git a/doc/source/_static/theme_overrides.css b/doc/source/_static/theme_overrides.css index 174fade5f2..63ee6cc74c 100644 --- a/doc/source/_static/theme_overrides.css +++ b/doc/source/_static/theme_overrides.css @@ -10,4 +10,4 @@ .wy-table-responsive { overflow: visible !important; } -} \ No newline at end of file +} diff --git a/doc/source/composites.rst b/doc/source/composites.rst index cdfc0277a9..03352d25f0 100644 --- a/doc/source/composites.rst +++ b/doc/source/composites.rst @@ -324,9 +324,9 @@ to be added. Using modifiers --------------- -In many cases the basic datasets need to be adjusted, e.g. for Solar -zenith angle normalization. These modifiers can be applied in the -following way:: +In many cases the basic datasets that go into the composite need to be +adjusted, e.g. for Solar zenith angle normalization. These modifiers +can be applied in the following way:: overview: compositor: !!python/name:satpy.composites.GenericCompositor @@ -347,6 +347,12 @@ Here we see two changes: The modifier above is a built-in that normalizes the Solar zenith angle to Sun being directly at the zenith. +More examples can be found in Satpy source code directory +`satpy/etc/composites `_. + +See the :doc:`modifiers` documentation for more information on +available built-in modifiers. + Using other composites ---------------------- @@ -535,3 +541,5 @@ More examples can be found in SatPy source code directory See the :doc:`enhancements` documentation for more information on available built-in enhancements. + +.. include:: modifiers.rst diff --git a/doc/source/conf.py b/doc/source/conf.py index 514569c490..cda2efd236 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -264,14 +264,14 @@ def __getattr__(cls, name): 'dask': ('https://docs.dask.org/en/latest', None), 'geoviews': ('http://geoviews.org', None), 'jobqueue': ('https://jobqueue.dask.org/en/latest', None), - 'numpy': ('https://docs.scipy.org/doc/numpy', None), + 'numpy': ('https://numpy.org/doc/stable', None), 'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None), 'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None), 'pyproj': ('https://pyproj4.github.io/pyproj/dev', None), 'pyresample': ('https://pyresample.readthedocs.io/en/stable', None), 'pytest': ('https://docs.pytest.org/en/stable/', None), 'python': ('https://docs.python.org/3', None), - 'scipy': ('https://docs.scipy.org/doc/scipy/', None), + 'scipy': ('http://scipy.github.io/devdocs', None), 'trollimage': ('https://trollimage.readthedocs.io/en/stable', None), 'trollsift': ('https://trollsift.readthedocs.io/en/stable', None), 'xarray': ('https://xarray.pydata.org/en/stable', None), diff --git a/doc/source/config.rst b/doc/source/config.rst index 3a2aad619a..7378ceb910 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -106,7 +106,7 @@ Whether or not generated longitude and latitude coordinates should be cached to on-disk zarr arrays. Currently this only works in very specific cases. Mainly the lon/lats that are generated when computing sensor and solar zenith and azimuth angles used in various modifiers and compositors. This caching is -only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``s. +only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``. Arrays are stored in ``cache_dir`` (see above). 
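A minimal sketch of enabling this caching from Python, assuming the ``satpy.config`` donfig interface that these YAML/config keys belong to; the cache path is a hypothetical placeholder::

    import satpy

    # Cache AreaDefinition-based lon/lats as on-disk zarr arrays;
    # sketch only, not part of this diff.
    satpy.config.set(
        cache_lonlats=True,
        cache_dir="/data/satpy_cache",  # hypothetical location
    )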
When setting this as an environment variable, this should be set with the @@ -132,7 +132,7 @@ Cache Sensor Angles Whether or not generated sensor azimuth and sensor zenith angles should be cached to on-disk zarr arrays. These angles are primarily used in certain modifiers and compositors. This caching is only done for -``AreaDefinition``-based geolocation, not ``SwathDefinition``s. +``AreaDefinition``-based geolocation, not ``SwathDefinition``. Arrays are stored in ``cache_dir`` (see above). This caching requires producing an estimate of the angles to avoid needing to @@ -242,6 +242,22 @@ will download and cache any necessary data files to :ref:`data_dir_setting` when needed. If ``False`` then pre-downloaded files will be used, but any other files will not be downloaded or checked for validity. +Sensor Angles Position Preference +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +* **Environment variable**: ``SATPY_SENSOR_ANGLES_POSITION_PREFERENCE`` +* **YAML/Config Key**: ``sensor_angles_position_preference`` +* **Default**: "actual" + +Control which satellite position should be preferred when generating sensor +azimuth and sensor zenith angles. This value is passed directly to the +:func:`~satpy.utils.get_satpos` function. See the documentation for that +function for more information on how the value will be used. This is used +as part of the :func:`~satpy.modifiers.angles.get_angles` and +:func:`~satpy.modifiers.angles.get_satellite_zenith_angle` functions, which are +used by multiple modifiers and composites, including the default Rayleigh +correction. + .. _component_configuration: Component Configuration diff --git a/doc/source/data_download.rst b/doc/source/data_download.rst index 109a310b8c..b8742fac96 100644 --- a/doc/source/data_download.rst +++ b/doc/source/data_download.rst @@ -37,7 +37,7 @@ NOAA GOES on Amazon Web Services * Associated Readers: ``abi_l1b`` In addition to the pages above, Brian Blaylock's `GOES-2-Go `_ -python package is useful for downloading GOES data to your local machine. +python package is useful for downloading GOES data to your local machine. Brian also prepared some instructions for using the ``rclone`` tool for downloading AWS data to a local machine. The instructions can be found diff --git a/doc/source/dev_guide/custom_reader.rst b/doc/source/dev_guide/custom_reader.rst index 84871d5442..08d322902d 100644 --- a/doc/source/dev_guide/custom_reader.rst +++ b/doc/source/dev_guide/custom_reader.rst @@ -60,17 +60,24 @@ if needed (ex. goes-imager). :file format: If the file format of the files is informative to the user or can distinguish one reader from another then this field should be specified. Common format names should be abbreviated following existing - abbreviations like `nc` for NetCDF3 or NetCDF4, `hdf` for HDF4, `h5` for + abbreviations like ``nc`` for NetCDF3 or NetCDF4, ``hdf`` for HDF4, ``h5`` for HDF5. The existing :ref:`reader's table ` can be used for reference. -When in doubt, reader names can be discussed in the github pull -request when this reader is added to Satpy or a github issue. +When in doubt, reader names can be discussed in the GitHub pull +request when this reader is added to Satpy, or in a GitHub issue. The YAML file ------------- -The yaml file is composed of three sections: +If your reader is going to be part of Satpy, the YAML file should be +located in the ``satpy/etc/readers`` directory, along with the YAML +files for all other readers.
If you are developing a reader for internal +purposes (such as for unpublished data), the YAML file should be located +in any directory in ``$SATPY_CONFIG_PATH`` within the subdirectory +``readers/`` (see :doc:`../../config`). + +The YAML file is composed of three sections: - the :ref:`reader ` section, that provides basic parameters for the reader @@ -109,7 +116,9 @@ The parameters to provide in this section are: - sensors: The list of sensors this reader will support. This must be all lowercase letters for full support throughout Satpy. - reader: The main python reader class to use, in most cases the - ``FileYAMLReader`` is a good choice. + ``FileYAMLReader`` is a good choice. Note that this is **not** the file handler + class described later (the file handler will be specified in the ``file_types`` + section further down in the YAML file). .. code:: yaml @@ -122,8 +131,8 @@ The parameters to provide in this section are: sensors: [seviri] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader -Optionally, if you need to customize the `DataID` for this reader, you can provide the -relevant keys with a `data_identification_keys` item here. See the :doc:`satpy_internals` +Optionally, if you need to customize the ``DataID`` for this reader, you can provide the +relevant keys with a ``data_identification_keys`` item here. See the :doc:`satpy_internals` section for more information. .. _custom_reader_file_types_section: @@ -203,7 +212,7 @@ Parameters you can define for example are: is optional if the data being read is gridded already. Swath data, for example data from some polar-orbiting satellites, should have these defined or no geolocation information will be available when the data - is loaded. For gridded datasets a `get_area_def` function will be + are loaded. For gridded datasets a ``get_area_def`` function will be implemented in python (see below) to define geolocation information. - Any other field that is relevant for the reader or could be useful metadata provided to the user. @@ -433,7 +442,7 @@ This method is good when you want to: 1. Define datasets dynamically without needing to define them in the YAML. 2. Supplement metadata from the YAML file with information from the file - content (ex. `resolution`). + content (ex. ``resolution``). 3. Determine if a dataset is available by the file contents. This differs from the default behavior of a dataset being considered loadable if its "file_type" is loaded. @@ -503,6 +512,11 @@ needs to implement a few methods: On top of that, two attributes need to be defined: ``start_time`` and ``end_time``, that define the start and end times of the sensing. +See the :ref:`time_metadata` section for a description of the different +times that Satpy readers typically use and what times should be used +for the ``start_time`` and ``end_time``. Note that these properties will +be assigned to the ``start_time`` and ``end_time`` metadata of any DataArrays +returned by ``get_dataset``; any existing values will be overwritten.
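To make the required interface concrete, here is a minimal, hypothetical file handler sketch; the class name, the ``filename_info`` keys, and the unimplemented ``get_dataset`` body are placeholders, not part of this change::

    from satpy.readers.file_handlers import BaseFileHandler


    class MyFileHandler(BaseFileHandler):
        """Hypothetical file handler showing the expected interface."""

        def __init__(self, filename, filename_info, filetype_info):
            super().__init__(filename, filename_info, filetype_info)
            # Open the file here and keep a handle for get_dataset.

        @property
        def start_time(self):
            # Often parsed from the filename via filename_info.
            return self.filename_info["start_time"]

        @property
        def end_time(self):
            return self.filename_info["end_time"]

        def get_dataset(self, dataset_id, dataset_info):
            # Read the requested data and return an xarray.DataArray;
            # the start_time/end_time properties above end up in its .attrs.
            raise NotImplementedError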
If you are writing a file handler for more common formats like HDF4, HDF5, or NetCDF4 you may want to consider using the utility base classes for each: diff --git a/doc/source/dev_guide/xarray_migration.rst b/doc/source/dev_guide/xarray_migration.rst index 065939306c..d460ab76d5 100644 --- a/doc/source/dev_guide/xarray_migration.rst +++ b/doc/source/dev_guide/xarray_migration.rst @@ -316,4 +316,3 @@ Helpful functions - :doc:`delayed` - :func:`~dask.array.rechunk` - :attr:`~dask.array.Array.vindex` - diff --git a/doc/source/enhancements.rst b/doc/source/enhancements.rst index 142be49531..635f3919b6 100644 --- a/doc/source/enhancements.rst +++ b/doc/source/enhancements.rst @@ -93,6 +93,67 @@ lookup colorize -------- + +The colorize enhancement can be used to map scaled/calibrated physical values +to colors. One or several `standard Trollimage color maps`_ may be used as in +the example here:: + + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: spectral, min_value: 193.15, max_value: 253.149999} + - {colors: greys, min_value: 253.15, max_value: 303.15} + +It is also possible to provide your own custom defined color mapping by +specifying a list of RGB values and the corresponding min and max values +between which to apply the colors. This is for instance a common use case for +Sea Surface Temperature (SST) imagery, as in this example with the EUMETSAT +Ocean and Sea Ice SAF (OSISAF) GHRSST product:: + + - name: osisaf_sst + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - colors: [ + [255, 0, 255], + [195, 0, 129], + [129, 0, 47], + [195, 0, 0], + [255, 0, 0], + [236, 43, 0], + [217, 86, 0], + [200, 128, 0], + [211, 154, 13], + [222, 180, 26], + [233, 206, 39], + [244, 232, 52], + [255.99609375, 255.99609375, 63.22265625], + [203.125, 255.99609375, 52.734375], + [136.71875, 255.99609375, 27.34375], + [0, 255.99609375, 0], + [0, 207.47265625, 0], + [0, 158.94921875, 0], + [0, 110.42578125, 0], + [0, 82.8203125, 63.99609375], + [0, 55.21484375, 127.9921875], + [0, 27.609375, 191.98828125], + [0, 0, 255.99609375], + [100.390625, 100.390625, 255.99609375], + [150.5859375, 150.5859375, 255.99609375]] + min_value: 296.55 + max_value: 273.55 + +The RGB color values will be interpolated to give a smooth result. This is +contrary to using the palettize enhancement. + +The above examples are just two different ways to apply colors to images with +Satpy. There is a wealth of other options for how to declare a colormap, please +see :func:`~satpy.enhancements.create_colormap` for more inspiration. + +.. _`standard Trollimage color maps`: https://trollimage.readthedocs.io/en/latest/colormap.html#default-colormaps + + palettize --------- diff --git a/doc/source/examples/fci_l1c_natural_color.rst b/doc/source/examples/fci_l1c_natural_color.rst index 243fdd0aee..016cb889b7 100644 --- a/doc/source/examples/fci_l1c_natural_color.rst +++ b/doc/source/examples/fci_l1c_natural_color.rst @@ -11,6 +11,22 @@ to generate a Natural Color RGB composite over the European area. not work with the currently released version of Satpy. Additional updates to this example will be coming soon. +.. note:: + + For reading compressed data, a decompression library is + needed. 
Either install the FCIDECOMP library (see the `FCI L1 Product User + Guide `_), or the + ``hdf5plugin`` package with:: + + pip install hdf5plugin + + or:: + + conda install hdf5plugin -c conda-forge + + If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin`` + at the top of your script. + .. code-block:: python from satpy.scene import Scene diff --git a/doc/source/faq.rst b/doc/source/faq.rst index 8617557755..2544132d13 100644 --- a/doc/source/faq.rst +++ b/doc/source/faq.rst @@ -129,13 +129,13 @@ control the number of threads used during compression by specifying the to set this to at least the same number of dask workers you use. Do this by adding ``num_threads`` to your `save_dataset` or `save_datasets` call:: - scn.save_datasets(base_dir='/tmp', tiled=True, num_threads=8) + scn.save_datasets(base_dir='/tmp', num_threads=8) -Here we're also using the `tiled` option to store our data as "tiles" instead +Satpy also stores our data as "tiles" instead of "stripes" which is another way to get more efficient compression of our -GeoTIFF image. +GeoTIFF image. You can disable this with ``tiled=False``. See the `GDAL GeoTIFF documentation `_ for more information on the creation options available including other -compression choices. \ No newline at end of file +compression choices. diff --git a/doc/source/index.rst b/doc/source/index.rst index 6c79923e10..605298d4ad 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -3,7 +3,7 @@ Satpy's Documentation ===================== Satpy is a python library for reading, manipulating, and writing data from -remote-sensing earth-observing meteorological satellite instruments. Satpy +remote-sensing earth-observing satellite instruments. Satpy provides users with readers that convert geophysical parameters from various file formats to the common Xarray :class:`~xarray.DataArray` and :class:`~xarray.Dataset` classes for easier interoperability with other @@ -30,7 +30,7 @@ libraries maintained by the Pytroll group including: Go to the Satpy project_ page for source code and downloads. -Satpy is designed to be easily extendable to support any meteorological +Satpy is designed to be easily extendable to support any earth observation satellite by the creation of plugins (readers, compositors, writers, etc). The table at the bottom of this page shows the input formats supported by the base Satpy installation. @@ -56,6 +56,7 @@ the base Satpy installation. examples/index quickstart readers + remote_reading composites resample enhancements @@ -68,6 +69,8 @@ the base Satpy installation. Satpy API faq + Release Notes + Security Policy .. _reader_table: @@ -94,7 +97,7 @@ the base Satpy installation. - AMV BUFR products not supported yet. * - MSG (Meteosat 8 to 11) L2 products in GRIB2 format - `seviri_l2_grib` - - In development, CLM, OCA and FIR products supported + - Nominal * - MFG (Meteosat 2 to 7) MVIRI data in netCDF format (FIDUCEO FCDR) - `mviri_l1b_fiduceo_nc` - Beta @@ -148,6 +151,9 @@ the base Satpy installation. * - MTG FCI Level 1C data in NetCDF format - `fci_l1c_nc` - In development (beta for FDHSI files, HRFI not supported yet) + * - MTG FCI Level 2 data in NetCDF format - `fci_l2_nc` + - In development. * - Calipso Caliop Level 2 Cloud Layer data (v3) in EOS-hdf4 format - `caliop_l2_cloud` - In development @@ -180,7 +186,7 @@ the base Satpy installation.
- `slstr_l1b` - In development * - OSISAF SST data in GHRSST (netcdf) format - - `ghrsst_l3c_sst` + - `ghrsst_l2` - In development * - NUCAPS EDR Retrieval in NetCDF4 format - `nucaps` @@ -245,7 +251,7 @@ the base Satpy installation. - `glm_l2` - Beta * - Sentinel-3 SLSTR SST data in NetCDF4 format - - `slstr_l2` + - `ghrsst_l2` - Beta * - IASI level 2 SO2 in BUFR format - `iasi_l2_so2_bufr` @@ -283,6 +289,18 @@ the base Satpy installation. * - MIMIC Total Precipitable Water Product Reader in NetCDF format - mimicTPW2_comp - Beta + * - SEADAS L2 Chlorophyll A product in HDF4 format + - seadas_l2 + - Beta + * - AAPP L1C MHS format + - `aapp_mhs_l1c` + - Nominal + * - AAPP L1C AMSU-B format + - `aapp_amsub_l1c` + - Beta + * - Arctica-M (N1) MSU-GS/A data in HDF5 format + - `msu_gsa_l1b` + - Beta Indices and tables ================== diff --git a/doc/source/install.rst b/doc/source/install.rst index bdb4216000..3c3ba26a41 100644 --- a/doc/source/install.rst +++ b/doc/source/install.rst @@ -118,5 +118,3 @@ created. $ virtualenv /path/to/pytroll-env $ source /path/to/pytroll-env/bin/activate $ pip install satpy - - diff --git a/doc/source/modifiers.rst b/doc/source/modifiers.rst new file mode 100644 index 0000000000..4669aca145 --- /dev/null +++ b/doc/source/modifiers.rst @@ -0,0 +1,57 @@ +Modifiers +========= + +Modifiers are filters applied to datasets prior to computing composites. +They take at least one input (a dataset) and have exactly one output +(the same dataset, modified). They can take additional input datasets +or parameters. + +Modifiers are defined in composites files in ``etc/composites`` within +``$SATPY_CONFIG_PATH``. + +The instruction to use a certain modifier can be contained in a composite +definition or in a reader definition. If it is defined in a composite +definition, it is applied upon constructing the composite. + +When using built-in composites, Satpy users do not need to understand +the mechanics of modifiers, as they are applied automatically. +The :doc:`composites` documentation contains information on how to apply +modifiers when creating new composites. + +Some readers read data where certain modifiers are already applied. Here, +the reader definition will refer to the Satpy modifier. This marking +adds the modifier to the metadata to prevent it from being applied again +upon composite calculation. + +Commonly used modifiers are listed in the table below. Further details +on those modifiers can be found in the linked API documentation. + +.. list-table:: Commonly used modifiers + :header-rows: 1 + + * - Label + - Class + - Description + * - ``sunz_corrected`` + - :class:`~satpy.modifiers.geometry.SunZenithCorrector` + - Modifies solar channels for the solar zenith angle to provide + smoother images. + * - ``effective_solar_pathlength_corrected`` + - :class:`~satpy.modifiers.geometry.EffectiveSolarPathLengthCorrector` + - Modifies solar channels for atmospheric path length of solar radiation. + * - ``nir_reflectance`` + - :class:`~satpy.modifiers.spectral.NIRReflectance` + - Calculates reflective part of channels at the edge of solar and + terrestrial radiation (3.7 µm or 3.9 µm). + * - ``nir_emissive`` + - :class:`~satpy.modifiers.spectral.NIREmissivePartFromReflectance` + - Calculates emissive part of channels at the edge of solar and terrestrial + radiation (3.7 µm or 3.9 µm). + * - ``rayleigh_corrected`` + - :class:`~satpy.modifiers.atmosphere.PSPRayleighReflectance` + - Modifies solar channels to filter out the visual impact of Rayleigh + scattering.
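As a usage sketch, a modifier from the table above can also be requested explicitly when loading data. The reader name, channel, and file glob below are hypothetical, and ``DataQuery`` is assumed to be importable from ``satpy.dataset``::

    from glob import glob

    from satpy import Scene
    from satpy.dataset import DataQuery

    # Hypothetical SEVIRI scene; any reader/channel with a defined
    # modifier works the same way.
    scn = Scene(filenames=glob("/data/hrit/*"), reader="seviri_l1b_hrit")
    # Load VIS006 with the solar zenith angle correction applied first.
    scn.load([DataQuery(name="VIS006", modifiers=("sunz_corrected",))])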
+ +A complete list can be found in the `etc/composites +`_ +source code and in the :mod:`~satpy.modifiers` module documentation. diff --git a/doc/source/multiscene.rst b/doc/source/multiscene.rst index 37e417ff3a..8a7be6b8aa 100644 --- a/doc/source/multiscene.rst +++ b/doc/source/multiscene.rst @@ -174,7 +174,7 @@ This will compute one video frame (image) at a time and write it to the MPEG-4 video file. For users with more powerful systems it is possible to use the ``client`` and ``batch_size`` keyword arguments to compute multiple frames in parallel using the dask ``distributed`` library (if installed). -See the :doc:`dask distributed ` documentation +See the :doc:`dask distributed ` documentation for information on creating a ``Client`` object. If working on a cluster you may want to use :doc:`dask jobqueue ` to take advantage of multiple nodes at a time. diff --git a/doc/source/quickstart.rst b/doc/source/quickstart.rst index f78d8be0b8..fcb6aa5a6b 100644 --- a/doc/source/quickstart.rst +++ b/doc/source/quickstart.rst @@ -34,14 +34,12 @@ method. Printing the Scene object will list each of the * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: - satellite_longitude: 0.0 + orbital_parameters: {'projection_longitude': 0.0, 'pr... sensor: seviri - satellite_altitude: 35785831.0 platform_name: Meteosat-11 standard_name: brightness_temperature units: K wavelength: (9.8, 10.8, 11.8) - satellite_latitude: 0.0 start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... @@ -58,14 +56,12 @@ method. Printing the Scene object will list each of the * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: - satellite_longitude: 0.0 + orbital_parameters: {'projection_longitude': 0.0, 'pr... sensor: seviri - satellite_altitude: 35785831.0 platform_name: Meteosat-11 standard_name: toa_bidirectional_reflectance units: % wavelength: (0.74, 0.81, 0.88) - satellite_latitude: 0.0 start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... @@ -82,14 +78,12 @@ method. Printing the Scene object will list each of the * x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ... * y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ... Attributes: - satellite_longitude: 0.0 + orbital_parameters: {'projection_longitude': 0.0, 'pr... sensor: seviri - satellite_altitude: 35785831.0 platform_name: Meteosat-11 standard_name: toa_bidirectional_reflectance units: % wavelength: (0.56, 0.635, 0.71) - satellite_latitude: 0.0 start_time: 2018-02-28 15:00:10.814000 end_time: 2018-02-28 15:12:43.956000 area: Area ID: some_area_name\nDescription: On-the-fly ar... @@ -146,30 +140,30 @@ The 'area' attribute of the DataArray, if present, can be converted to latitude >>> vis006_lon, vis006_lat = vis006.attrs['area'].get_lonlats() -Visualizing data -================ +Visualizing data +================ + +To visualize loaded data in a pop-up window: + + >>> global_scene.show(0.6) -To visualize loaded data in a pop-up window: - - >>> global_scene.show(0.6) - Alternatively if working in a Jupyter notebook the scene can be converted to a `geoviews `_ object using the :meth:`~satpy.scene.Scene.to_geoviews` method. 
The geoviews package is not a requirement of the base satpy install so in order to use this feature the user needs to install the geoviews package himself. - - >>> import holoviews as hv - >>> import geoviews as gv - >>> import geoviews.feature as gf - >>> gv.extension("bokeh", "matplotlib") - >>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False] - >>> %opts Image QuadMesh (cmap='RdBu_r') + + >>> import holoviews as hv + >>> import geoviews as gv + >>> import geoviews.feature as gf + >>> gv.extension("bokeh", "matplotlib") + >>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False] + >>> %opts Image QuadMesh (cmap='RdBu_r') >>> gview = global_scene.to_geoviews(vdims=[0.6]) - >>> gview[::5,::5] * gf.coastline * gf.borders - -Creating new datasets -===================== + >>> gview[::5,::5] * gf.coastline * gf.borders + +Creating new datasets +===================== Calculations based on loaded datasets/channels can easily be assigned to a new dataset: @@ -183,7 +177,7 @@ Assigning additional custom metadata is also possible. >>> from satpy.dataset import combine_metadata >>> scene['new_band'] = scene[0.8] / scene[0.6] >>> scene['new_band'].attrs = combine_metadata(scene[0.8], scene[0.6]) - >>> scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want' + >>> scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want' Generating composites ===================== diff --git a/doc/source/readers.rst b/doc/source/readers.rst index ecb57fd59a..76cfa21dca 100644 --- a/doc/source/readers.rst +++ b/doc/source/readers.rst @@ -113,7 +113,7 @@ Load remote data ================ Starting with Satpy version 0.25.1 with supported readers it is possible to -load data from remote file systems like ``s3fs`` or ``fsspec``. +load data from remote file systems like ``s3fs`` or ``fsspec``. For example: :: @@ -162,56 +162,102 @@ See the :func:`~satpy.readers.find_files_and_readers` documentation for more information on the possible parameters as well as for searching on remote file systems. +.. _dataset_metadata: + Metadata ======== -.. _dataset_metadata: - The datasets held by a scene also provide vital metadata such as dataset name, units, observation time etc. The following attributes are standardized across all readers: * ``name``, and other identifying metadata keys: See :doc:`dev_guide/satpy_internals`. * ``start_time``: Left boundary of the time interval covered by the dataset. + For more information see the :ref:`time_metadata` section below. * ``end_time``: Right boundary of the time interval covered by the dataset. + For more information see the :ref:`time_metadata` section below. * ``area``: :class:`~pyresample.geometry.AreaDefinition` or :class:`~pyresample.geometry.SwathDefinition` if data is geolocated. Areas are used for gridded projected data and Swaths when data must be described by individual longitude/latitude coordinates. See the Coordinates section below. * ``reader``: The name of the Satpy reader that produced the dataset. * ``orbital_parameters``: Dictionary of orbital parameters describing the satellite's position. - - * For *geostationary* satellites it is described using the following scalar attributes: - - * ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the - time of observation in geodetic coordinates (i.e. altitude is relative and normal to the - surface of the ellipsoid). 
- * ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a - confined area in which the satellite is actively maintained in using maneuvres). Inbetween - major maneuvres, when the satellite is permanently moved, the nominal position is constant. - * ``nadir_longitude/latitude``: Intersection of the instrument's Nadir with the surface of the - earth. May differ from the actual satellite position, if the instrument is pointing slightly - off the axis (satellite, earth-center). If available, this should be used to compute viewing - angles etc. Otherwise, use the actual satellite position. - * ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This - should be used to compute lat/lon coordinates. Note that the projection center can differ - considerably from the actual satellite position. For example MSG-1 was at times positioned - at 3.4 degrees west, while the image data was re-projected to 0 degrees. - * [DEPRECATED] ``satellite_longitude/latitude/altitude``: Current position of the satellite at - the time of observation in geodetic coordinates. - - .. note:: Longitudes and latitudes are given in degrees, altitude in meters. For use in - pyorbital, the altitude has to be converted to kilometers, see for example - :func:`pyorbital.orbital.get_observer_look`. - - * For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of - the swath as ancillary datasets. Additional metadata related to the satellite position include: - - * ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit - + See the :ref:`orbital_parameters` section below for more information. +* ``time_parameters``: Dictionary of additional time parameters describing the + time ranges related to the requests or schedules for when observations + should happen and when they actually do. See :ref:`time_metadata` below for + details. * ``raw_metadata``: Raw, unprocessed metadata from the reader. Note that the above attributes are not necessarily available for each dataset. +.. _time_metadata: + +Time Metadata +------------- + +In addition to the generic ``start_time`` and ``end_time`` pieces of metadata +there are other time fields that may be provided if the reader supports them. +These items are stored in a ``time_parameters`` sub-dictionary and they include +values like: + +* ``observation_start_time``: The point in time when a sensor began recording + for the current data. +* ``observation_end_time``: Same as ``observation_start_time``, but when data + has stopped being recorded. +* ``nominal_start_time``: The "human friendly" time describing the start of + the data observation interval or repeat cycle. This time is often on a round + minute (seconds=0). Along with the nominal end time, these times define the + regular interval of the data collection. For example, GOES-16 ABI full disk + images are collected every 10 minutes (in the common configuration) so + ``nominal_start_time`` and ``nominal_end_time`` would be 10 minutes apart + regardless of when the instrument recorded data inside that interval. + This time may also be referred to as the repeat cycle, repeat slot, or time + slot. +* ``nominal_end_time``: Same as ``nominal_start_time``, but the end of the + interval. + +In general, ``start_time`` and ``end_time`` will be set to the "nominal" +time by the reader. This ensures that other Satpy components get a +consistent time for calculations (ex. 
generation of solar zenith angles) +and can be reused between bands. + +See the :ref:`data_array_coordinates` section below for more information on +time information that may show up as a per-element/row "coordinate" on the +DataArray (ex. acquisition time) instead of as metadata. + +.. _orbital_parameters: + +Orbital Parameters +------------------ + +Orbital parameters describe the position of the satellite. As such they +typically come in a few "flavors" for the common types of orbits a satellite +may have. + +For *geostationary* satellites the position is described using the following scalar attributes: + + * ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the + time of observation in geodetic coordinates (i.e. altitude is relative and normal to the + surface of the ellipsoid). + * ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a + confined area in which the satellite is actively maintained using maneuvers). In between + major maneuvers, when the satellite is permanently moved, the nominal position is constant. + * ``nadir_longitude/latitude``: Intersection of the instrument's nadir with the surface of the + earth. May differ from the actual satellite position, if the instrument is pointing slightly + off the axis (satellite, earth-center). If available, this should be used to compute viewing + angles etc. Otherwise, use the actual satellite position. + * ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This + should be used to compute lat/lon coordinates. Note that the projection center can differ + considerably from the actual satellite position. For example MSG-1 was at times positioned + at 3.4 degrees west, while the image data was re-projected to 0 degrees. + +For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of +the swath as ancillary datasets. Additional metadata related to the satellite position includes: + + * ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit + +.. _data_array_coordinates: + Coordinates =========== @@ -308,3 +354,12 @@ satpy cf nc readers .. automodule:: satpy.readers.satpy_cf_nc :noindex: + +hdf5 based readers +------------------ + +Arctica-M N1 HDF5 format reader +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. automodule:: satpy.readers.msu_gsa_l1b + :noindex: diff --git a/doc/source/remote_reading.rst b/doc/source/remote_reading.rst new file mode 100644 index 0000000000..2a5c06e283 --- /dev/null +++ b/doc/source/remote_reading.rst @@ -0,0 +1,173 @@ +==================== +Reading remote files +==================== + +Using a single reader +===================== + +Some of the readers in Satpy can read data directly over various transfer protocols. This is done +using `fsspec `_ and the various packages +it uses underneath. + +As an example, reading ABI data from public AWS S3 storage can be done in the following way:: + + from satpy import Scene + + storage_options = {'anon': True} + filenames = ['s3://noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*'] + scn = Scene(reader='abi_l1b', filenames=filenames, reader_kwargs={'storage_options': storage_options}) + scn.load(['true_color_raw']) + +Reading from S3 as above requires the `s3fs` library to be installed in addition to `fsspec`. + +As an alternative, the storage options can be given using +`fsspec configuration `_.
+For the above example, the configuration could be saved to `s3.json` in the `fsspec` configuration directory +(by default placed in `~/.config/fsspec/` directory in Linux):: + + { + "s3": { + "anon": "true" + } + } + +.. note:: + + Options given in `reader_kwargs` override only the matching options given in the configuration file; everything else is left + as-is. In case of problems in data access, remove the configuration file to see if that solves the issue. + + +For reference, reading SEVIRI HRIT data from a local S3 storage works the same way:: + + filenames = [ + 's3://satellite-data-eumetcast-seviri-rss/H-000-MSG3*202204260855*', + ] + storage_options = { + "client_kwargs": {"endpoint_url": "https://PLACE-YOUR-SERVER-URL-HERE"}, + "secret": "VERYBIGSECRET", + "key": "ACCESSKEY" + } + scn = Scene(reader='seviri_l1b_hrit', filenames=filenames, reader_kwargs={'storage_options': storage_options}) + scn.load(['WV_073']) + +Using the `fsspec` configuration file `s3.json`, the same options would look like this:: + + { + "s3": { + "client_kwargs": {"endpoint_url": "https://PLACE-YOUR-SERVER-URL-HERE"}, + "secret": "VERYBIGSECRET", + "key": "ACCESSKEY" + } + } + + +Using multiple readers +====================== + +If multiple readers are used and the required credentials differ, the storage options are passed per reader like this:: + + reader1_filenames = [...] + reader2_filenames = [...] + filenames = { + 'reader1': reader1_filenames, + 'reader2': reader2_filenames, + } + reader1_storage_options = {...} + reader2_storage_options = {...} + reader_kwargs = { + 'reader1': { + 'option1': 'foo', + 'storage_options': reader1_storage_options, + }, + 'reader2': { + 'option1': 'foo', + 'storage_options': reader2_storage_options, + } + } + scn = Scene(filenames=filenames, reader_kwargs=reader_kwargs) + + +Caching the remote files +======================== + +Caching the remote file locally can speed up the overall processing time significantly, especially if the data are re-used, +for example when testing. The caching can be done by taking advantage of the `fsspec caching mechanism +`_:: + + reader_kwargs = { + 'storage_options': { + 's3': {'anon': True}, + 'simple': { + 'cache_storage': '/tmp/s3_cache', + } + } + } + + filenames = ['simplecache::s3://noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*'] + scn = Scene(reader='abi_l1b', filenames=filenames, reader_kwargs=reader_kwargs) + scn.load(['true_color_raw']) + scn2 = scn.resample(scn.coarsest_area(), resampler='native') + scn2.save_datasets(base_dir='/tmp/', tiled=True, blockxsize=512, blockysize=512, driver='COG', overviews=[]) + + +The following table shows the timings for running the above code with different cache statuses: + +.. _cache_timing_table: + +.. list-table:: Processing times without and with caching + :header-rows: 1 + :widths: 40 30 30 + + * - Caching + - Elapsed time + - Notes + * - No caching + - 650 s + - remove `reader_kwargs` and `simplecache::` from the code + * - File cache + - 66 s + - Initial run + * - File cache + - 13 s + - Second run + +.. note:: + + The cache is not cleaned by Satpy nor by fsspec, so the user should handle cleaning excess files from `cache_storage`; + a cleanup sketch follows these notes. + + +.. note:: + + Only `simplecache` is considered thread-safe, so using the other caching mechanisms may or may not work depending + on the reader, Dask scheduler or the phase of the moon.
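Since neither Satpy nor fsspec cleans the cache, a small helper along these lines can prune old entries. This is a sketch rather than part of the patch; the directory matches the `cache_storage` example above, and the age threshold is arbitrary::

    import os
    import time

    cache_dir = '/tmp/s3_cache'  # same cache_storage directory as in the example
    max_age = 7 * 24 * 3600  # one week in seconds, an arbitrary limit
    now = time.time()
    for name in os.listdir(cache_dir):
        path = os.path.join(cache_dir, name)
        # remove plain files that have not been touched within max_age
        if os.path.isfile(path) and now - os.path.getmtime(path) > max_age:
            os.remove(path)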
+ + +Resources +========= + +See :class:`~satpy.readers.FSFile` for direct usage of `fsspec` with Satpy, and the +`fsspec documentation `_ for more details on connection options. + + +Supported readers +================= + +.. _reader_table: + +.. list-table:: Satpy Readers capable of reading remote files using `fsspec` + :header-rows: 1 + :widths: 70 30 + + * - Description + - Reader name + * - MSG (Meteosat 8 to 11) SEVIRI data in HRIT format + - `seviri_l1b_hrit` + * - GOES-R imager data in netcdf format + - `abi_l1b` + * - NOAA GOES-R ABI L2+ products in netcdf format + - `abi_l2_nc` + * - Sentinel-3 A and B OLCI Level 1B data in netCDF4 format + - `olci_l1b` + * - Sentinel-3 A and B OLCI Level 2 data in netCDF4 format + - `olci_l2` diff --git a/doc/source/writers.rst b/doc/source/writers.rst index 3112a416b2..a4add46618 100644 --- a/doc/source/writers.rst +++ b/doc/source/writers.rst @@ -54,7 +54,7 @@ One common parameter across almost all Writers is ``filename`` and * - GeoTIFF with NinJo tags - :class:`ninjogeotiff ` - Beta - - + - Available Writers ================= diff --git a/satpy/_compat.py b/satpy/_compat.py index edce4b91b6..6a2a4fd528 100644 --- a/satpy/_compat.py +++ b/satpy/_compat.py @@ -17,19 +17,74 @@ # satpy. If not, see . """Backports and compatibility fixes for satpy.""" +from threading import RLock + +_NOT_FOUND = object() + + +class CachedPropertyBackport: + """Backport of cached_property from Python 3.8. + + Source: https://github.com/python/cpython/blob/v3.8.0/Lib/functools.py#L930 + """ + + def __init__(self, func):  # noqa + self.func = func + self.attrname = None + self.__doc__ = func.__doc__ + self.lock = RLock() + + def __set_name__(self, owner, name):  # noqa + if self.attrname is None: + self.attrname = name + elif name != self.attrname: + raise TypeError( + "Cannot assign the same cached_property to two different names " + f"({self.attrname!r} and {name!r})." + ) + + def __get__(self, instance, owner=None):  # noqa + if instance is None: + return self + if self.attrname is None: + raise TypeError( + "Cannot use cached_property instance without calling __set_name__ on it.") + try: + cache = instance.__dict__ + except AttributeError:  # not all objects have __dict__ (e.g. class defines slots) + msg = ( + f"No '__dict__' attribute on {type(instance).__name__!r} " + f"instance to cache {self.attrname!r} property." + ) + raise TypeError(msg) from None + val = cache.get(self.attrname, _NOT_FOUND) + if val is _NOT_FOUND: + with self.lock: + # check if another thread filled cache while we awaited lock + val = cache.get(self.attrname, _NOT_FOUND) + if val is _NOT_FOUND: + val = self.func(instance) + try: + cache[self.attrname] = val + except TypeError: + msg = ( + f"The '__dict__' attribute on {type(instance).__name__!r} instance " + f"does not support item assignment for caching {self.attrname!r} property."
+ ) + raise TypeError(msg) from None + return val + + try: - from functools import cached_property + from functools import cached_property  # type: ignore except ImportError:  # for python < 3.8 - from functools import lru_cache - - def cached_property(func):  # type: ignore - """Port back functools.cached_property.""" - return property(lru_cache(maxsize=None)(func)) + cached_property = CachedPropertyBackport  # type: ignore try: - from numpy.typing import ArrayLike  # noqa + from numpy.typing import ArrayLike, DTypeLike  # noqa except ImportError:  # numpy <1.20 + from numpy import dtype as DTypeLike  # noqa from numpy import ndarray as ArrayLike  # noqa diff --git a/satpy/_config.py b/satpy/_config.py index 2684f04ad8..7490e53564 100644 --- a/satpy/_config.py +++ b/satpy/_config.py @@ -43,6 +43,7 @@ 'data_dir': _satpy_dirs.user_data_dir, 'demo_data_dir': '.', 'download_aux': True, + 'sensor_angles_position_preference': 'actual', } # Satpy main configuration object diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index c449f9fb24..6dee796087 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2015-2020 Satpy developers +# Copyright (c) 2015-2022 Satpy developers # # This file is part of satpy. # @@ -35,7 +35,7 @@ LOG = logging.getLogger(__name__) -NEGLIBLE_COORDS = ['time'] +NEGLIGIBLE_COORDS = ['time'] """Keywords identifying non-dimensional coordinates to be ignored during composite generation.""" MASKING_COMPOSITOR_METHODS = ['less', 'less_equal', 'equal', 'greater_equal', @@ -90,7 +90,7 @@ def sub_arrays(proj1, proj2): class CompositeBase: - """Base class for all compositors. + """Base class for all compositors and modifiers. A compositor in Satpy is a class that takes in zero or more input DataArrays and produces a new DataArray with its own identifier (name). @@ -153,18 +153,51 @@ def apply_modifier_info(self, origin, destination): d[k] = o[k] def match_data_arrays(self, data_arrays): - """Match data arrays so that they can be used together in a composite.""" + """Match data arrays so that they can be used together in a composite. + + For the purpose of this method, "can be used together" means: + + - All arrays should have the same dimensions. + - Either all arrays should have an area, or none should. + - If all have an area, the areas should all be the same. + + In addition, negligible non-dimensional coordinates are dropped (see + :meth:`drop_coordinates`) and dask chunks are unified (see + :func:`satpy.utils.unify_chunks`). + + Args: + data_arrays (List[arrays]): Arrays to be checked + + Returns: + data_arrays (List[arrays]): + Arrays with negligible non-dimensional coordinates removed. + + Raises: + :class:`IncompatibleAreas`: + If dimensions or areas do not match. + :class:`ValueError`: + If some, but not all, data arrays lack an area attribute. + """ self.check_geolocation(data_arrays) new_arrays = self.drop_coordinates(data_arrays) new_arrays = list(unify_chunks(*new_arrays)) return new_arrays def drop_coordinates(self, data_arrays): - """Drop neglible non-dimensional coordinates.""" + """Drop negligible non-dimensional coordinates. + + Drops negligible coordinates if they do not correspond to any + dimension. Negligible coordinates are defined in the + :attr:`NEGLIGIBLE_COORDS` module attribute.
+ + Args: + data_arrays (List[arrays]): Arrays to be checked + """ new_arrays = [] for ds in data_arrays: drop = [coord for coord in ds.coords - if coord not in ds.dims and any([neglible in coord for neglible in NEGLIBLE_COORDS])] + if coord not in ds.dims and + any([negligible in coord for negligible in NEGLIGIBLE_COORDS])] if drop: new_arrays.append(ds.drop(drop)) else: @@ -173,7 +206,23 @@ def drop_coordinates(self, data_arrays): return new_arrays def check_geolocation(self, data_arrays): - """Check that the geolocations of the *data_arrays* are compatible.""" + """Check that the geolocations of the *data_arrays* are compatible. + + For the purpose of this method, "compatible" means: + + - All arrays should have the same dimensions. + - Either all arrays should have an area, or none should. + - If all have an area, the areas should all be the same. + + Args: + data_arrays (List[arrays]): Arrays to be checked + + Raises: + :class:`IncompatibleAreas`: + If dimensions or areas do not match. + :class:`ValueError`: + If some, but not all, data arrays lack an area attribute. + """ if len(data_arrays) == 1: return @@ -197,12 +246,6 @@ def check_geolocation(self, data_arrays): "'{}'".format(self.attrs['name'])) raise IncompatibleAreas("Areas are different") - def check_areas(self, data_arrays): - """Check that the areas of the *data_arrays* are compatible.""" - warnings.warn('satpy.composites.CompositeBase.check_areas is deprecated, use ' - 'satpy.composites.CompositeBase.match_data_arrays instead') - return self.match_data_arrays(data_arrays) - class DifferenceCompositor(CompositeBase): """Make the difference of two data arrays.""" @@ -214,7 +257,8 @@ def __call__(self, projectables, nonprojectables=None, **attrs): projectables = self.match_data_arrays(projectables) info = combine_metadata(*projectables) info['name'] = self.attrs['name'] - info.update(attrs) + info.update(self.attrs)  # attrs from YAML/__init__ + info.update(attrs)  # overwriting of DataID properties proj = projectables[0] - projectables[1] proj.attrs = info @@ -451,7 +495,7 @@ def build_colormap(palette, dtype, info): Colormaps come in different forms, but they are all supposed to have color values between 0 and 255. The following cases are considered: - - Palettes comprised of only a list on colors. If *dtype* is uint8, + - Palettes comprised of only a list of colors. If *dtype* is uint8, the values of the colormap are the enumeration of the colors. Otherwise, the colormap values will be spread evenly from the min to the max of the valid_range provided in `info`.
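As a hypothetical illustration of the ``match_data_arrays`` contract documented in the hunk above (matching dimensions and areas, unified dask chunks), a custom compositor might look like the following; the class is not part of the patch and the attribute handling is a minimal sketch::

    from satpy.composites import CompositeBase

    class RatioCompositor(CompositeBase):
        """Compute the ratio of two matched input bands."""

        def __call__(self, projectables, nonprojectables=None, **attrs):
            # Raises IncompatibleAreas/ValueError per the documented contract
            num, den = self.match_data_arrays(projectables)
            res = num / den
            res.attrs = num.attrs.copy()
            res.attrs.update(self.attrs)
            res.attrs.update(attrs)
            return res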
@@ -587,18 +631,10 @@ def __call__(self, projectables, **kwargs): try: coszen = np.cos(np.deg2rad(projectables[2 if self.day_night == "day_night" else 1])) except IndexError: - from pyorbital.astronomy import cos_zen + from satpy.modifiers.angles import get_cos_sza LOG.debug("Computing sun zenith angles.") # Get chunking that matches the data - try: - chunks = foreground_data.sel(bands=foreground_data['bands'][0]).chunks - except KeyError: - chunks = foreground_data.chunks - lons, lats = foreground_data.attrs["area"].get_lonlats(chunks=chunks) - coszen = xr.DataArray(cos_zen(foreground_data.attrs["start_time"], - lons, lats), - dims=['y', 'x'], - coords=[foreground_data['y'], foreground_data['x']]) + coszen = get_cos_sza(foreground_data) # Calculate blending weights coszen -= np.min((lim_high, lim_low)) coszen /= np.abs(lim_low - lim_high) @@ -876,7 +912,10 @@ def _get_band(self, high_res, low_res, color, ratio): return ret def __call__(self, datasets, optional_datasets=None, **info): - """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``.""" + """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``. + + The resulting RGB has the units attribute removed. + """ if len(datasets) != 3: raise ValueError("Expected 3 datasets, got %d" % (len(datasets), )) if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \ @@ -935,7 +974,9 @@ def __call__(self, datasets, optional_datasets=None, **info): info.update(self.attrs) # Force certain pieces of metadata that we *know* to be true info.setdefault("standard_name", "true_color") - return super(RatioSharpenedRGB, self).__call__((r, g, b), **info) + res = super(RatioSharpenedRGB, self).__call__((r, g, b), **info) + res.attrs.pop("units", None) + return res def _mean4(data, offset=(0, 0), block_id=None): @@ -1057,7 +1098,8 @@ def __call__(self, projectables, *args, **kwargs): # Get the enhanced version of the RGB composite to be sharpened rgb_img = enhance2dataset(projectables[1]) - rgb_img *= luminance + # Ignore alpha band when applying luminance + rgb_img = rgb_img.where(rgb_img.bands == 'A', rgb_img * luminance) return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs) @@ -1295,7 +1337,10 @@ def _get_merged_image_data(foreground: xr.DataArray, class MaskingCompositor(GenericCompositor): """A compositor that masks e.g. IR 10.8 channel data using cloud products from NWC SAF.""" - def __init__(self, name, transparency=None, conditions=None, **kwargs): + _supported_modes = {"LA", "RGBA"} + + def __init__(self, name, transparency=None, conditions=None, mode="LA", + **kwargs): """Collect custom configuration values. Kwargs: @@ -1305,6 +1350,10 @@ def __init__(self, name, transparency=None, conditions=None, **kwargs): DEPRECATED. conditions (list): list of three items determining the masking settings. + mode (str, optional): Image mode to return. For single-band input, + this shall be "LA" (default) or "RGBA". For + multi-band input, this argument is ignored + as the result is always RGBA. Each condition in *conditions* consists of three items: @@ -1363,6 +1412,10 @@ def __init__(self, name, transparency=None, conditions=None, **kwargs): self.conditions = conditions if self.conditions is None: raise ValueError("Masking conditions not defined.") + if mode not in self._supported_modes: + raise ValueError(f"Invalid mode {mode!s}. 
Supported modes: " + + ", ".join(self._supported_modes)) + self.mode = mode super(MaskingCompositor, self).__init__(name, **kwargs) @@ -1373,34 +1426,11 @@ def __call__(self, projectables, *args, **kwargs): projectables = self.match_data_arrays(projectables) data_in = projectables[0] mask_in = projectables[1] - mask_data = mask_in.data alpha_attrs = data_in.attrs.copy() - if 'bands' in data_in.dims: - data = [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A'] - else: - data = [data_in] + data = self._select_data_bands(data_in) - # Create alpha band - alpha = da.ones((data[0].sizes['y'], - data[0].sizes['x']), - chunks=data[0].chunks) - - for condition in self.conditions: - method = condition['method'] - value = condition.get('value', None) - if isinstance(value, str): - value = _get_flag_value(mask_in, value) - transparency = condition['transparency'] - mask = self._get_mask(method, value, mask_data) - - if transparency == 100.0: - data = self._set_data_nans(data, mask, alpha_attrs) - alpha_val = 1. - transparency / 100. - alpha = da.where(mask, alpha_val, alpha) - - alpha = xr.DataArray(data=alpha, attrs=alpha_attrs, - dims=data[0].dims, coords=data[0].coords) + alpha = self._get_alpha_bands(data, mask_in, alpha_attrs) data.append(alpha) res = super(MaskingCompositor, self).__call__(data, **kwargs) @@ -1434,6 +1464,44 @@ def _set_data_nans(self, data, mask, attrs): return data + def _select_data_bands(self, data_in): + """Select data to be composited from input data. + + From input data, select the bands that need to have masking applied. + """ + if 'bands' in data_in.dims: + return [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A'] + if self.mode == "RGBA": + return [data_in, data_in, data_in] + return [data_in] + + def _get_alpha_bands(self, data, mask_in, alpha_attrs): + """Get alpha bands. + + From input data, masks, and attributes, get alpha band. + """ + # Create alpha band + mask_data = mask_in.data + alpha = da.ones((data[0].sizes['y'], + data[0].sizes['x']), + chunks=data[0].chunks) + + for condition in self.conditions: + method = condition['method'] + value = condition.get('value', None) + if isinstance(value, str): + value = _get_flag_value(mask_in, value) + transparency = condition['transparency'] + mask = self._get_mask(method, value, mask_data) + + if transparency == 100.0: + data = self._set_data_nans(data, mask, alpha_attrs) + alpha_val = 1. - transparency / 100. + alpha = da.where(mask, alpha_val, alpha) + + return xr.DataArray(data=alpha, attrs=alpha_attrs, + dims=data[0].dims, coords=data[0].coords) + def _get_flag_value(mask, val): """Get a numerical value of the named flag. diff --git a/satpy/composites/ahi.py b/satpy/composites/ahi.py index c170eb0543..edd195bbcf 100644 --- a/satpy/composites/ahi.py +++ b/satpy/composites/ahi.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2015-2017 Satpy developers +# Copyright (c) 2015-2021 Satpy developers # # This file is part of satpy. 
# @@ -20,6 +20,7 @@ import logging from satpy.composites import GenericCompositor +from satpy.dataset import combine_metadata LOG = logging.getLogger(__name__) @@ -27,17 +28,17 @@ class GreenCorrector(GenericCompositor): """Corrector of the AHI green band to compensate for the deficit of chlorophyll signal.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args, fractions=(0.85, 0.15), **kwargs): """Set default keyword argument values.""" # XXX: Should this be 0.93 and 0.07 - self.fractions = kwargs.pop('fractions', [0.85, 0.15]) + self.fractions = fractions super(GreenCorrector, self).__init__(*args, **kwargs) def __call__(self, projectables, optional_datasets=None, **attrs): """Boost vegetation effect thanks to NIR (0.8µm) band.""" - green, nir = self.match_data_arrays(projectables) LOG.info('Boosting vegetation on green band') - new_green = green * self.fractions[0] + nir * self.fractions[1] - new_green.attrs = green.attrs.copy() + projectables = self.match_data_arrays(projectables) + new_green = sum(fraction * value for fraction, value in zip(self.fractions, projectables)) + new_green.attrs = combine_metadata(*projectables) return super(GreenCorrector, self).__call__((new_green,), **attrs) diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py index a94bfce11d..2c6587570c 100644 --- a/satpy/composites/viirs.py +++ b/satpy/composites/viirs.py @@ -268,7 +268,7 @@ def _saturation_correction(self, dnb_data, unit_factor, min_val, max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct) while saturation_pct > 0.005: - max_val *= 1.1 * unit_factor + max_val *= 1.1 saturation_pct = float(np.count_nonzero( dnb_data > max_val)) / dnb_data.size LOG.debug("Dynamic DNB saturation percentage: %f", @@ -340,7 +340,7 @@ def __call__(self, datasets, **info): else: inner_sqrt = (output_dataset - min_val) / (max_val - min_val) # clip negative values to 0 before the sqrt - inner_sqrt = inner_sqrt.where(inner_sqrt > 0, 0) + inner_sqrt.data = np.clip(inner_sqrt.data, 0, None) output_dataset.data = np.sqrt(inner_sqrt).data info = dnb_data.attrs.copy() @@ -748,10 +748,8 @@ def _calculate_weights(tile_size): # that has all 8 surrounding tiles available # create our empty template tiles template_tile = np.zeros((3, 3, tile_size, tile_size), dtype=np.float32) - """ # TEMP FOR TESTING, create a weight tile that does no interpolation - template_tile[1,1] = template_tile[1,1] + 1.0 - """ + # template_tile[1,1] = template_tile[1,1] + 1.0 # for ease of calculation, figure out the index of the center pixel in a tile # and how far that pixel is from the edge of the tile (in pixel units) @@ -955,18 +953,19 @@ class SnowAge(GenericCompositor): Product is based on method presented at the second CSPP/IMAPP users' meeting at Eumetsat in Darmstadt on 14-16 April 2015 - # Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France - # These Look-up Tables allow you to create the RGB snow product - # for SUOMI-NPP VIIRS Imager according to the algorithm - # presented at the second CSPP/IMAPP users' meeting at Eumetsat - # in Darmstadt on 14-16 April 2015 - # The algorithm and the product are described in this - # presentation : - # http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf - # For further information you may contact - # Bernard Bellec at Bernard.Bellec@meteo.fr - # or - # Pascale Roquet at Pascale.Roquet@meteo.fr + Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France + These Look-up Tables allow you to 
create the RGB snow product + for SUOMI-NPP VIIRS Imager according to the algorithm + presented at the second CSPP/IMAPP users' meeting at Eumetsat + in Darmstadt on 14-16 April 2015 + The algorithm and the product are described in this + presentation : + http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf + as well as in the paper http://dx.doi.org/10.1016/j.rse.2017.04.028 + For further information you may contact + Bernard Bellec at Bernard.Bellec@meteo.fr + or + Pascale Roquet at Pascale.Roquet@meteo.fr """ def __call__(self, projectables, nonprojectables=None, **info): @@ -975,11 +974,13 @@ def __call__(self, projectables, nonprojectables=None, **info): The algorithm and the product are described in this presentation : http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf + as well as in the paper http://dx.doi.org/10.1016/j.rse.2017.04.028 For further information you may contact Bernard Bellec at Bernard.Bellec@meteo.fr or Pascale Roquet at Pascale.Roquet@meteo.fr + The resulting RGB has the units attribute removed. """ if len(projectables) != 5: raise ValueError("Expected 5 datasets, got %d" % @@ -1008,4 +1009,6 @@ def __call__(self, projectables, nonprojectables=None, **info): ch2.attrs = info ch3.attrs = info - return super(SnowAge, self).__call__([ch1, ch2, ch3], **info) + res = super(SnowAge, self).__call__([ch1, ch2, ch3], **info) + res.attrs.pop("units", None) + return res diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py index 7093e949b8..3c29e350eb 100644 --- a/satpy/dataset/dataid.py +++ b/satpy/dataset/dataid.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2015-2020 Satpy developers +# Copyright (c) 2015-2021 Satpy developers # # This file is part of satpy. # @@ -45,7 +45,14 @@ def get_keys_from_config(common_id_keys, config): class ValueList(IntEnum): - """A static value list.""" + """A static value list. + + This class is meant to be used for dynamically created Enums. Due to this + it should not be used as a normal Enum class or there may be some + unexpected behavior. For example, this class contains custom pickling and + unpickling handling that may break in subclasses. + + """ @classmethod def convert(cls, value): @@ -55,6 +62,21 @@ def convert(cls, value): except KeyError: raise ValueError('{} invalid value for {}'.format(value, cls)) + @classmethod + def _unpickle(cls, enum_name, enum_members, enum_member): + """Create dynamic class that was previously pickled. + + See :meth:`__reduce_ex__` for implementation details. + + """ + enum_cls = cls(enum_name, enum_members) + return enum_cls[enum_member] + + def __reduce_ex__(self, proto): + """Reduce the object for pickling.""" + return (ValueList._unpickle, + (self.__class__.__name__, list(self.__class__.__members__.keys()), self.name)) + def __eq__(self, other): """Check equality.""" return self.name == other @@ -244,7 +266,6 @@ def __hash__(self): }, } - #: Default ID keys for coordinate DataArrays. 
default_co_keys_config = {'name': { 'required': True, @@ -448,7 +469,7 @@ def __lt__(self, other): popitem = _immutable clear = _immutable update = _immutable  # type: ignore - setdefault = _immutable + setdefault = _immutable  # type: ignore def _find_modifiers_key(self): for key, val in self.items(): @@ -475,12 +496,12 @@ def _generalize_value_for_comparison(val): """Get a generalize value for comparisons.""" if isinstance(val, numbers.Number): return 0 - elif isinstance(val, str): + if isinstance(val, str): return "" - elif isinstance(val, tuple): + if isinstance(val, tuple): return tuple() - else: - raise NotImplementedError("Don't know how to generalize " + str(type(val))) + + raise NotImplementedError("Don't know how to generalize " + str(type(val))) class DataQuery: diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index c58875297a..4849f9e2f6 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -17,6 +17,7 @@ """Enhancements.""" import logging +import os import warnings from functools import partial from numbers import Number @@ -28,6 +29,7 @@ from trollimage.xrimage import XRImage from satpy._compat import ArrayLike +from satpy._config import get_config_path LOG = logging.getLogger(__name__) @@ -354,10 +356,21 @@ def create_colormap(palette): **From a file** - Colormaps can be loaded from ``.npy`` files as 2D raw arrays with rows for - each color. The filename to load can be provided with the ``filename`` key - in the provided palette information. The colormap is interpreted as 1 of 4 - different "colormap modes": ``RGB``, ``RGBA``, ``VRGB``, or ``VRGBA``. The + Colormaps can be loaded from ``.npy``, ``.npz``, or comma-separated text + files. Numpy (npy/npz) files should be 2D arrays with rows for each color. + Comma-separated files should have a row for each color with each column + representing a single value/channel. The filename to load can be provided + with the ``filename`` key in the provided palette information. A filename + ending with ``.npy`` or ``.npz`` is read as a numpy file with + :func:`numpy.load`. All other extensions are + read as comma-separated files. For ``.npz`` files the data must be stored + as a positional list where the first element represents the colormap to + use. See :func:`numpy.savez` for more information. The path to the + colormap can be relative if it is stored in a directory specified by + :ref:`config_path_setting`. Otherwise it should be an absolute path. + + The colormap is interpreted as 1 of 4 different "colormap modes": + ``RGB``, ``RGBA``, ``VRGB``, or ``VRGBA``. The colormap mode can be forced with the ``colormap_mode`` key in the provided palette information. If it is not provided then a default will be chosen based on the number of columns in the array (3: RGB, 4: VRGB, 5: VRGBA).
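A hypothetical end-to-end sketch of the file-based colormap loading described in the docstring above; the file name and color values are invented, and the ``filename``/``color_scale`` palette keys are assumed to behave as documented::

    import numpy as np
    from satpy.enhancements import create_colormap

    # Two-color comma-separated colormap; 3 columns are interpreted as RGB
    np.savetxt('my_cmap.csv', np.array([[0, 0, 0], [255, 255, 255]]), delimiter=',')

    cmap = create_colormap({'filename': 'my_cmap.csv', 'color_scale': 255})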
@@ -456,7 +469,7 @@ def _create_colormap_from_sequence(colors, palette, color_scale): def _create_colormap_from_file(filename, palette, color_scale): from trollimage.colormap import Colormap - data = np.load(filename) + data = _read_colormap_data_from_file(filename) cols = data.shape[1] default_modes = { 3: 'RGB', @@ -482,6 +495,21 @@ def _create_colormap_from_file(filename, palette, color_scale): return Colormap(*zip(values, colors)) +def _read_colormap_data_from_file(filename): + if not os.path.exists(filename): + filename = get_config_path(filename) + ext = os.path.splitext(filename)[1] + if ext in (".npy", ".npz"): + file_content = np.load(filename) + if ext == ".npz": + # .npz is a collection + # assume position list-like and get the first element + file_content = file_content["arr_0"] + return file_content + # CSV + return np.loadtxt(filename, delimiter=",") + + def _three_d_effect_delayed(band_data, kernel, mode): """Kernel for running delayed 3D effect creation.""" from scipy.signal import convolve2d diff --git a/satpy/enhancements/ahi.py b/satpy/enhancements/ahi.py new file mode 100644 index 0000000000..bafe55f1d6 --- /dev/null +++ b/satpy/enhancements/ahi.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# Copyright (c) 2021 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Enhancement functions specific to the AHI sensor.""" +import dask.array as da +import numpy as np + +from satpy.enhancements import apply_enhancement + + +def jma_true_color_reproduction(img, **kwargs): + """Apply CIE XYZ matrix and return True Color Reproduction data. 
+ + Himawari-8 True Color Reproduction Approach Based on the CIE XYZ Color System + Hidehiko MURATA, Kotaro SAITOH, and Yasuhiko SUMIDA + Meteorological Satellite Center, Japan Meteorological Agency + NOAA National Environmental Satellite, Data, and Information Service + Colorado State University—CIRA + https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + """ + + def func(img_data): + ccm = np.array([ + [1.1759, 0.0561, -0.1322], + [-0.0386, 0.9587, 0.0559], + [-0.0189, -0.1161, 1.0777] + ]) + output = da.dot(img_data.T, ccm.T) + return output.T + + apply_enhancement(img.data, func, pass_dask=True) diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml index d4aa14512e..04233d059d 100644 --- a/satpy/etc/areas.yaml +++ b/satpy/etc/areas.yaml @@ -114,6 +114,160 @@ msg_seviri_iodc_1km: lower_left_xy: [-5571248.412732527, -5566247.740968115] upper_right_xy: [5566247.740968115, 5571248.412732527] +# Full disk - segmented products +msg_seviri_fes_9km: + description: + MSG SEVIRI Full Earth Scanning service area definition + with 9 km resolution + projection: + proj: geos + lon_0: 0.0 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 1237 + width: 1237 + area_extent: + lower_left_xy: [-5567248.28351984, -5567248.28340708] + upper_right_xy: [5567248.28340708 , 5567248.28351984] + +msg_seviri_rss_9km: + description: + MSG SEVIRI Rapid Scanning Service area definition + with 9 km resolution + projection: + proj: geos + lon_0: 9.5 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 1237 + width: 1237 + area_extent: + lower_left_xy: [-5567248.28351984, -5567248.28340708] + upper_right_xy: [5567248.28340708 , 5567248.28351984] + +msg_seviri_iodc_9km: + description: + MSG SEVIRI Indian Ocean Data Coverage service area definition + with 9 km resolution + projection: + proj: geos + lon_0: 41.5 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 1237 + width: 1237 + area_extent: + lower_left_xy: [-5567248.28351984, -5567248.28340708] + upper_right_xy: [5567248.28340708 , 5567248.28351984] + +msg_seviri_fes_9km_ext: + description: + MSG SEVIRI Full Earth Scanning service area definition + with 9 km resolution (extended outside original 3km grid) + projection: + proj: geos + lon_0: 0.0 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 1238 + width: 1238 + area_extent: + lower_left_xy: [-5571748.888268564, -5571748.888155806] + upper_right_xy: [5571748.888155806, 5571748.888268564] + +msg_seviri_rss_9km_ext: + description: + MSG SEVIRI Rapid Scanning Service area definition + with 9 km resolution (extended outside original 3km grid) + projection: + proj: geos + lon_0: 9.5 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 1238 + width: 1238 + area_extent: + lower_left_xy: [-5571748.888268564, -5571748.888155806] + upper_right_xy: [5571748.888155806, 5571748.888268564] + +msg_seviri_iodc_9km_ext: + description: + MSG SEVIRI Indian Ocean Data Coverage service area definition + with 9 km resolution (extended outside original 3km grid) + projection: + proj: geos + lon_0: 41.5 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 1238 + width: 1238 + area_extent: + lower_left_xy: [-5571748.888268564, -5571748.888155806] + upper_right_xy: [5571748.888155806, 5571748.888268564] + +msg_seviri_fes_48km: + description: + MSG SEVIRI Full Earth Scanning service area definition + with 48 km resolution + projection: + proj: geos + lon_0: 0.0 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 232 + width: 232 + 
area_extent: + lower_left_xy: [-5570248.686685662, -5567248.28340708] + upper_right_xy: [5567248.28340708, 5570248.686685662] + +msg_seviri_rss_48km: + description: + MSG SEVIRI Rapid Scanning Service area definition + with 48 km resolution + projection: + proj: geos + lon_0: 9.5 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 232 + width: 232 + area_extent: + lower_left_xy: [-5570248.686685662, -5567248.28340708] + upper_right_xy: [5567248.28340708, 5570248.686685662] + +msg_seviri_iodc_48km: + description: + MSG SEVIRI Indian Ocean Data Coverage service area definition + with 48 km resolution + projection: + proj: geos + lon_0: 41.5 + a: 6378169.0 + b: 6356583.8 + h: 35785831.0 + shape: + height: 232 + width: 232 + area_extent: + lower_left_xy: [-5570248.686685662, -5567248.28340708] + upper_right_xy: [5567248.28340708, 5570248.686685662] + # Regional @@ -221,6 +375,47 @@ mtg_fci_fdss_2km: upper_right_xy: [5567999.994206558, 5567999.994206558] units: m +# Full disk - segmented products +mtg_fci_fdss_6km: + description: + MTG FCI Full Disk Scanning Service area definition + with 6 km resolution + projection: + proj: geos + lon_0: 0 + h: 35786400 + x_0: 0 + y_0: 0 + ellps: WGS84 + no_defs: null + shape: + height: 1856 + width: 1856 + area_extent: + lower_left_xy: [-5567999.994200589, -5567999.994200589] + upper_right_xy: [5567999.994206558, 5567999.994206558] + units: m + +mtg_fci_fdss_32km: + description: + MTG FCI Full Disk Scanning Service area definition + with 32 km resolution + projection: + proj: geos + lon_0: 0 + h: 35786400 + x_0: 0 + y_0: 0 + ellps: WGS84 + no_defs: null + shape: + height: 348 + width: 348 + area_extent: + lower_left_xy: [-5567999.994200589, -5567999.994200589] + upper_right_xy: [5567999.994206558, 5567999.994206558] + units: m + # Geostationary Operational Environmental Satellite (GOES) / ABI Instrument # Full disk diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 052237969d..67f9bf5b82 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -3,7 +3,8 @@ sensor_name: visir/abi modifiers: rayleigh_corrected_crefl: modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector - dem_filename: CMGDEM.hdf + url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf" + known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604" optional_prerequisites: - name: satellite_azimuth_angle - name: satellite_zenith_angle @@ -321,7 +322,7 @@ composites: appear red. With the increasing intensity and temperature the fires will also be detected by the 2.2 μm and 1.6 μm bands resulting very intense fires in white. references: - Research Article: http://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf + Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf prerequisites: - name: C07 - name: C06 @@ -335,7 +336,7 @@ composites: indicative of severe storms. Bright yellow in the RGB indicates strong updrafts prior to the mature storm stage. 
references: - Research Article: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf + Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: @@ -363,11 +364,12 @@ composites: - name: C04 - name: C02 - name: C05 + ash: description: > Ash RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor @@ -385,7 +387,7 @@ composites: description: > Dust RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor @@ -404,8 +406,8 @@ composites: Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Day_Cloud_Phase_Distinction.pdf - Cloud Type recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13_A&RevisionSelectionMethod=LatestReleased&Rendition=Web + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf + Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -430,7 +432,7 @@ composites: description: > Simple Water Vapor RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Simple_Water_Vapor_RGB.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Simple_Water_Vapor_RGB.pdf ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -443,7 +445,7 @@ composites: description: > Differential Water Vapor RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DifferentialWaterVaporRGB_final.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DifferentialWaterVaporRGB_final.pdf ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -459,7 +461,7 @@ composites: description: > Day Convection RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: 
!!python/name:satpy.composites.DifferenceCompositor @@ -480,7 +482,7 @@ composites: description: > SO2 RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor @@ -498,7 +500,7 @@ composites: description: > Day Snow-Fog RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFog.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf ## it uses the default used in etc/enhancements/generic.yaml of snow_defaul compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -516,7 +518,7 @@ composites: description: > Nighttime Microphysics RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_final.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_Final_20191206.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor @@ -534,7 +536,7 @@ composites: description: > Fire Temperature RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf ## adapted from etc/composites/viirs.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -548,7 +550,7 @@ composites: description: > Day Land Cloud Fire RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C06 @@ -562,7 +564,7 @@ composites: description: > Day Land Cloud RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 @@ -618,12 +620,10 @@ composites: cloud_phase: description: > - Cloud Phase RGB, for EUMETSAT - Day Cloud Phase RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_2861499.html) - "When we use the NIR2.3 instead of the VIS0.8 on the green beam, we can devise a new RGB product (let us call it 'Day Cloud Phase RGB') that has similar cloud colours than the Natural Colour RGB, but with improved separation of ice and water clouds." 
+ EUMETSAT Cloud Phase RGB product references: EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf - Cloud Phase recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13&RevisionSelectionMethod=LatestReleased&Rendition=Web + Recipe : http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 @@ -632,17 +632,17 @@ composites: modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - standard_name: natural_color + standard_name: cloud_phase cloud_phase_raw: description: > - same as cloud_phase + same as eum_cloud_phase RGB product, without modifiers compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 - name: C06 - name: C02 - standard_name: natural_color + standard_name: cloud_phase tropical_airmass: description: > diff --git a/satpy/etc/composites/agri.yaml b/satpy/etc/composites/agri.yaml index 33b4ffe803..7e67210492 100644 --- a/satpy/etc/composites/agri.yaml +++ b/satpy/etc/composites/agri.yaml @@ -30,7 +30,7 @@ composites: Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Day_Cloud_Phase_Distinction.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf Cloud Type recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13_A&RevisionSelectionMethod=LatestReleased&Rendition=Web ## it uses the default used in etc/enhancements/generic.yaml compositor: !!python/name:satpy.composites.GenericCompositor @@ -56,7 +56,7 @@ composites: description: > Day Snow-Fog RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFog.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf ## it uses the default used in etc/enhancements/generic.yaml of snow_defaul compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -74,7 +74,7 @@ composites: description: > Fire Temperature RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf ## adapted from etc/composites/viirs.yaml compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: @@ -88,7 +88,7 @@ composites: description: > Day Land Cloud Fire RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C06 @@ -102,7 +102,8 @@ composites: description: > Day Land Cloud RGB, for GOESR: NASA, NOAA references: - CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf + CIRA Quick Guide: 
https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf + compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: C05 diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index e733b00507..f29a36357f 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -4,9 +4,9 @@ modifiers: rayleigh_corrected: modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard - aerosol_type: marine_clean_aerosol + aerosol_type: rayleigh_only prerequisites: - - wavelength: 0.64 + - name: B03 modifiers: [sunz_corrected] optional_prerequisites: - satellite_azimuth_angle @@ -28,6 +28,20 @@ composites: modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance + green_true_color_reproduction: + # JMA True Color Reproduction green band + # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + compositor: !!python/name:satpy.composites.ahi.GreenCorrector + fractions: [0.6321, 0.2928, 0.0751] + prerequisites: + - name: B02 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: B03 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: B04 + modifiers: [sunz_corrected] + standard_name: none + green_nocorr: compositor: !!python/name:satpy.composites.ahi.GreenCorrector # FUTURE: Set a wavelength...see what happens. Dependency finding @@ -155,13 +169,22 @@ composites: standard_name: fire_temperature name: fire_temperature_39refl - overview: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - 0.65 - - 0.85 - - 10.4 + - name: B03 + modifiers: [sunz_corrected] + - name: B04 + modifiers: [sunz_corrected] + - name: B13 + standard_name: overview + + overview_raw: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - B03 + - B04 + - B13 standard_name: overview natural_color: @@ -205,6 +228,18 @@ composites: high_resolution_band: red standard_name: true_color + true_color_reproduction: + # JMA True Color Reproduction + # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html + compositor: !!python/name:satpy.composites.SelfSharpenedRGB + prerequisites: + - name: B03 + modifiers: [sunz_corrected, rayleigh_corrected] + - name: green_true_color_reproduction + - name: B01 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: true_color_reproduction + # true_color_reducedsize_land: # compositor: !!python/name:satpy.composites.GenericCompositor # prerequisites: diff --git a/satpy/etc/composites/mhs.yaml b/satpy/etc/composites/mhs.yaml new file mode 100644 index 0000000000..245991f53e --- /dev/null +++ b/satpy/etc/composites/mhs.yaml @@ -0,0 +1,18 @@ +sensor_name: mhs + +composites: + mw183_humidity: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '3' + - name: '4' + - name: '5' + standard_name: mw183_humidity + + mw183_humidity_surface: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '1' + - name: '2' + - name: '3' + standard_name: mw183_humidity_surface diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml index b34ff4891d..010bd240b0 100644 --- a/satpy/etc/composites/msi.yaml +++ b/satpy/etc/composites/msi.yaml @@ -79,7 +79,7 @@ composites: - name: 'B04' modifiers: [effective_solar_pathlength_corrected] standard_name: natural_color - + true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: diff --git a/satpy/etc/composites/msu_gsa.yaml 
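Note: in the green_true_color_reproduction entry above, fractions are blend weights; GreenCorrector forms a hybrid green band as a weighted sum of its inputs. A sketch of the arithmetic only (plain numpy, not the Satpy class itself):

import numpy as np

def hybrid_green(b02, b03, b04, fractions=(0.6321, 0.2928, 0.0751)):
    # Weighted blend of the green (B02), red (B03) and near-infrared (B04)
    # reflectances, using the JMA True Color Reproduction weights above.
    return fractions[0] * b02 + fractions[1] * b03 + fractions[2] * b04

print(hybrid_green(np.array([0.4]), np.array([0.3]), np.array([0.5])))
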
b/satpy/etc/composites/msu_gsa.yaml new file mode 100644 index 0000000000..0ab6840af4 --- /dev/null +++ b/satpy/etc/composites/msu_gsa.yaml @@ -0,0 +1,77 @@ +sensor_name: visir/msu_gsa + +composites: + overview_raw: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: C01 + - name: C03 + - name: C09 + standard_name: overview + overview: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: C01 + modifiers: [sunz_corrected] + - name: C03 + modifiers: [sunz_corrected] + - name: C09 + standard_name: overview + msugsa_color: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: C03 + modifiers: [sunz_corrected] + - name: C02 + modifiers: [sunz_corrected] + - name: C01 + modifiers: [sunz_corrected] + standard_name: natural_color + msugsa_color_raw: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: C03 + - name: C02 + - name: C01 + standard_name: natural_color + + night_ir_alpha: + compositor: !!python/name:satpy.composites.GenericCompositor + standard_name: night_ir_alpha + prerequisites: + - 3.8 + - 10.8 + - 11.9 + - 10.8 + + day_color_with_night_ir: + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: day_color_with_night_ir + lim_low: 90.0 + lim_high: 100.0 + prerequisites: + - msugsa_color_raw + - night_ir_with_background + + day_color_with_night_ir_hires: + compositor: !!python/name:satpy.composites.DayNightCompositor + standard_name: day_color_with_night_ir + lim_low: 90.0 + lim_high: 100.0 + prerequisites: + - msugsa_color_raw + - night_ir_with_background_hires + + night_ir_with_background: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - night_ir_alpha + - _night_background + + night_ir_with_background_hires: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background_hires + prerequisites: + - night_ir_alpha + - _night_background_hires diff --git a/satpy/etc/composites/scatterometer.yaml b/satpy/etc/composites/scatterometer.yaml index 02f6ceb0ff..30d1733f4a 100644 --- a/satpy/etc/composites/scatterometer.yaml +++ b/satpy/etc/composites/scatterometer.yaml @@ -12,4 +12,3 @@ composites: prerequisites: - name: surface_soil_moisture standard_name: soil_moisture - diff --git a/satpy/etc/composites/tropomi.yaml b/satpy/etc/composites/tropomi.yaml index c52c0aa430..e7037de0c5 100644 --- a/satpy/etc/composites/tropomi.yaml +++ b/satpy/etc/composites/tropomi.yaml @@ -13,4 +13,3 @@ composites: prerequisites: - nitrogendioxide_tropospheric_column standard_name: no2_tropospheric_polluted - diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index 4fbc3a6b3c..050df02cb5 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -547,3 +547,81 @@ composites: modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] standard_name: ocean_color high_resolution_band: red + + cloud_phase_distinction: + description: > + Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA + Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) + references: + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf + Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 + ## it uses the default used in etc/enhancements/generic.yaml + 
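Note: for the msu_gsa DayNightCompositor entries above, lim_low and lim_high are solar zenith angle thresholds in degrees: fully day below 90, fully night above 100, blended in between. Roughly (a sketch of the weighting, not Satpy's exact implementation):

import numpy as np

def day_weight(sza_deg, lim_low=90.0, lim_high=100.0):
    # 1.0 selects the day composite, 0.0 the night composite; values in
    # between blend the two across the twilight zone.
    return np.clip((lim_high - sza_deg) / (lim_high - lim_low), 0.0, 1.0)
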
compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: M15 + - name: I01 + modifiers: [sunz_corrected_iband, rayleigh_corrected] + - name: I03 + modifiers: [sunz_corrected_iband] + standard_name: cloud_phase_distinction + + cloud_phase_distinction_raw: + description: > + same as cloud_phase_distinction + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: M15 + - name: I01 + - name: I03 + standard_name: cloud_phase_distinction + + cloud_phase: + description: > + EUMETSAT Cloud Phase RGB product + references: + EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf + Recipe : http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: I03 + modifiers: [sunz_corrected_iband] + - name: M11 + modifiers: [sunz_corrected] + - name: M05 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: cloud_phase + + cloud_phase_raw: + description: > + same as cloud_phase RGB product, without modifiers + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: I03 + - name: M11 + - name: M05 + standard_name: cloud_phase + + cimss_cloud_type: + description: > + Cloud Type RGB, candidate for standard FCI RGB + references: + EUMETRAIN Quick Guide: http://eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: M09 + modifiers: [sunz_corrected] + - name: I01 + modifiers: [sunz_corrected_iband] + - name: I03 + modifiers: [sunz_corrected_iband] + standard_name: cimss_cloud_type + + cimss_cloud_type_raw: + description: > + Cloud Type RGB, candidate for standard FCI RGB. Raw version without sun zenith correction. 
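Note: the sunz_corrected / sunz_corrected_iband modifiers that the *_raw variants drop normalise reflectances for solar illumination. To first order (the real modifier additionally caps the correction at high zenith angles):

import numpy as np

def sunz_correct(reflectance, sza_deg):
    # Divide by the cosine of the solar zenith angle so reflectances stay
    # comparable across the disk; this blows up near the terminator where
    # cos(sza) approaches zero, hence the cap in Satpy's modifier.
    return reflectance / np.cos(np.deg2rad(sza_deg))
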
+ compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: M09 + - name: I01 + - name: I03 + standard_name: cimss_cloud_type diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index b00cfe8dac..8ef2280c85 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -337,43 +337,43 @@ composites: cloud_top_phase: compositor: !!python/name:satpy.composites.PaletteCompositor prerequisites: - - cpp_phase - - cpp_phase_pal + - cmic_phase + - cmic_phase_pal standard_name: cloud_top_phase cloud_drop_effective_radius: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - - cpp_reff - - cpp_reff_pal + - cmic_reff + - cmic_reff_pal standard_name: cloud_drop_effective_radius cloud_optical_thickness: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - - cpp_cot - - cpp_cot_pal + - cmic_cot + - cmic_cot_pal standard_name: cloud_optical_thickness cloud_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - - cpp_cwp - - cpp_cwp_pal + - cmic_cwp + - cmic_cwp_pal standard_name: cloud_water_path ice_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - - cpp_iwp - - cpp_iwp_pal + - cmic_iwp + - cmic_iwp_pal standard_name: ice_water_path liquid_water_path: compositor: !!python/name:satpy.composites.ColorizeCompositor prerequisites: - - cpp_lwp - - cpp_lwp_pal + - cmic_lwp + - cmic_lwp_pal standard_name: liquid_water_path night_microphysics: @@ -427,3 +427,81 @@ composites: standard_name: night_background_hires url: "https://neo.sci.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_3km_geo.tif" known_hash: "sha256:e915ef2a20d84e2a59e1547d3ad564463ad4bcf22bfa02e0e0b8ed1cd722e9c0" # optional + + cloud_phase_distinction: + description: > + Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA + Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html) + references: + CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf + Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659 + ## it uses the default used in etc/enhancements/generic.yaml + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - wavelength: 10.3 + - wavelength: 0.64 + modifiers: [sunz_corrected, rayleigh_corrected] + - wavelength: 1.6 + modifiers: [sunz_corrected] + standard_name: cloud_phase_distinction + + cloud_phase_distinction_raw: + description: > + same as cloud_phase_distinction + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - wavelength: 10.3 + - wavelength: 0.64 + - wavelength: 1.6 + standard_name: cloud_phase_distinction + + cloud_phase: + description: > + EUMETSAT Cloud Phase RGB product + references: + EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf + Recipe : http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - wavelength: 1.6 + modifiers: [sunz_corrected] + - wavelength: 2.25 + modifiers: [sunz_corrected] + - wavelength: 0.67 + modifiers: [sunz_corrected, rayleigh_corrected] + standard_name: cloud_phase + + cloud_phase_raw: + description: > + same as cloud_phase RGB product, without modifiers + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - 
wavelength: 1.6 + - wavelength: 2.25 + - wavelength: 0.67 + standard_name: cloud_phase + + cimss_cloud_type: + description: > + Cloud Type RGB, candidate for standard FCI RGB + references: + EUMETRAIN Quick Guide: http://eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - wavelength: 1.38 + modifiers: [sunz_corrected] + - wavelength: 0.64 + modifiers: [sunz_corrected] + - wavelength: 1.61 + modifiers: [sunz_corrected] + standard_name: cimss_cloud_type + + cimss_cloud_type_raw: + description: > + Cloud Type RGB, candidate for standard FCI RGB. Raw version without sun zenith correction. + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - wavelength: 1.38 + - wavelength: 0.64 + - wavelength: 1.61 + standard_name: cimss_cloud_type diff --git a/satpy/etc/enhancements/abi.yaml b/satpy/etc/enhancements/abi.yaml index 5260d8e9d7..a71dc6bd5f 100644 --- a/satpy/etc/enhancements/abi.yaml +++ b/satpy/etc/enhancements/abi.yaml @@ -155,3 +155,70 @@ enhancements: threshold: 242.0 min_in: 163.0 max_in: 330.0 + + # EumetSat cloud phase and cloud type RGB recipes + # http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf + cloud_phase: + standard_name: cloud_phase + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [ 0, 0, 0] + max_stretch: [50, 50, 100] + + # NOAA GOES-R Level-2 ABI Cloud Mask product + # https://www.goes-r.gov/products/baseline-clear-sky-mask.html + binary_cloud_mask: + name: BCM + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - {'values': [ 0, # Clear + 1, # Cloudy + 255, # Fill Value + ], + 'colors': [[ 94, 79, 162], # blue, 0 = Clear + [255, 255, 255], # white, 1 = Cloudy + [ 0, 0, 0], # black, 255 = Fill Value + ], + 'color_scale': 255, + } + + four_level_cloud_mask: + name: ACM + operations: + - name: palettize + method: !!python/name:satpy.enhancements.palettize + kwargs: + palettes: + - {'values': [ 0, # Clear + 1, # Probably Clear + 2, # Probably Cloudy + 3, # Cloudy + 255, # Fill Value + ], + 'colors': [[ 94, 79, 162], # blue, 0 = Clear + [ 73, 228, 242], # cyan, 1 = Probably Clear + [158, 1, 66], # red, 2 = Probably Cloudy + [255, 255, 255], # white, 3 = Cloudy + [ 0, 0, 0], # black, 255 = Fill Value + ], + 'color_scale': 255, + } + + cloud_probability: + name: Cloud_Probabilities + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: 'spectral', + reverse: true, + min_value: 0.0, + max_value: 1.0, + } diff --git a/satpy/etc/enhancements/ahi.yaml b/satpy/etc/enhancements/ahi.yaml index 8951eaf7cd..ffd96a45e0 100644 --- a/satpy/etc/enhancements/ahi.yaml +++ b/satpy/etc/enhancements/ahi.yaml @@ -9,3 +9,17 @@ enhancements: stretch: crude min_stretch: [-26.2, -43.2, 243.9] max_stretch: [0.6, 6.7, 208.5] + + true_color_reproduction: + standard_name: true_color_reproduction + operations: + - name: color + method: !!python/name:satpy.enhancements.ahi.jma_true_color_reproduction + - name: cira_stretch + method: !!python/name:satpy.enhancements.cira_stretch + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [0.08, 0.08, 0.08] + max_stretch: [.93, .90, .90] diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 73de008986..9e28a74ce2 100644 --- 
a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -917,3 +917,40 @@ enhancements: stretch: crude min_stretch: [26.2, 27.4, 243.9] max_stretch: [ 0.6, -26.2, 208.5] + # SEADAS Cholorphyll A - MODIS or VIIRS + chlor_a_default: + name: chlor_a + reader: seadas_l2 + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: log + base: "10" + factor: 21.0 + min_stretch: 0.0 + max_stretch: 20.0 + + cimss_cloud_type: + standard_name: cimss_cloud_type + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [0.0, 0.0, 0.0] + max_stretch: [10.0, 80.0, 80.0] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [1.5, 0.75, 1.0] + + cloud_phase: + standard_name: cloud_phase + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [ 0, 0, 0] + max_stretch: [50, 50, 100] diff --git a/satpy/etc/enhancements/mhs.yaml b/satpy/etc/enhancements/mhs.yaml new file mode 100644 index 0000000000..c997a11350 --- /dev/null +++ b/satpy/etc/enhancements/mhs.yaml @@ -0,0 +1,29 @@ +enhancements: + + mw183_humidity: + standard_name: mw183_humidity + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} + + mw183_humidity_surface: + standard_name: mw183_humidity_surface + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} diff --git a/satpy/etc/enhancements/mimic.yaml b/satpy/etc/enhancements/mimic.yaml index 3314f0d263..81f4091356 100644 --- a/satpy/etc/enhancements/mimic.yaml +++ b/satpy/etc/enhancements/mimic.yaml @@ -96,5 +96,3 @@ enhancements: ], min_value: 0, max_value: 8 } - - diff --git a/satpy/etc/enhancements/scatterometer.yaml b/satpy/etc/enhancements/scatterometer.yaml index 37315b8163..305c0bfba6 100644 --- a/satpy/etc/enhancements/scatterometer.yaml +++ b/satpy/etc/enhancements/scatterometer.yaml @@ -17,5 +17,3 @@ enhancements: kwargs: palettes: - {colors: rdbu, min_value: 0, max_value: 100} - - diff --git a/satpy/etc/eps_avhrrl1b_6.5.xml b/satpy/etc/eps_avhrrl1b_6.5.xml index 22b95db353..469a3e9705 100644 --- a/satpy/etc/eps_avhrrl1b_6.5.xml +++ b/satpy/etc/eps_avhrrl1b_6.5.xml @@ -7,20 +7,20 @@ File: eps_avhrrl1b_6.5.xml Copyright (c) 2004, 2005 Eumetsat - - This file is part of the EPSXML format specification generated + + This file is part of the EPSXML format specification generated automatically using pfs2xml. - + This XML description is distributed under the GPL license; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. - - This XML description is distributed in the hope that it will be + + This XML description is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
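Note: the crude stretch used by the cloud_phase and cimss_cloud_type enhancements above is a plain linear rescale of each channel from [min_stretch, max_stretch] to [0, 1]. A sketch:

import numpy as np

def crude_stretch(data, min_stretch, max_stretch):
    # Linear rescale: min_stretch maps to 0 and max_stretch to 1; values
    # outside the range end up clipped at display time.
    return (data - min_stretch) / (max_stretch - min_stretch)

# cloud_phase scales its three channels to roughly 0-50, 0-50 and 0-100 %:
print(crude_stretch(np.array([0.0, 25.0, 50.0]), 0.0, 50.0))  # [0.  0.5 1. ]
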
- + You should have received a copy of the GNU General Public License along with the pfs2xml package; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index db57c08a26..c309d87595 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -150,6 +150,16 @@ datasets: file_type: abi_l2_acm file_key: BCM + four_level_cloud_mask: + name: ACM + file_type: abi_l2_acm + file_key: ACM + + cloud_probabilities: + name: Cloud_Probabilities + file_type: abi_l2_acm + file_key: Cloud_Probabilities + # --- Aerosol Detection Products --- aerosol_binary_mask: name: Aerosol @@ -323,184 +333,218 @@ datasets: # ---- file_types: abi_l2_cmip_c01: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c02: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c03: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c04: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c05: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c06: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c07: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c08: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c09: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c10: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c11: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c12: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c13: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c14: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c15: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_cmip_c16: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{area_code:1s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:1s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CMIP" abi_l2_mcmip: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-MCMIP{scene_abbr:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-MCMIP{scene_abbr:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "MCMIP" abi_l2_acha: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "ACHA" abi_l2_acht: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "ACHT" abi_l2_acm: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "ACM" abi_l2_actp: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "ACTP" abi_l2_adp: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "ADP" abi_l2_aod: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "AOD" abi_l2_cod: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: - # F (Full Disk) or C (CONUS) - - '{system_environment:2s}_{mission_id:3s}-L2-COD{area_code:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' - # M1 or M2 for mesoscale - - '{system_environment:2s}_{mission_id:3s}-L2-CODM{area_code:1d}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-COD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "COD" # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_codd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + observation_type: "CODD" abi_l2_codn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CODN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + observation_type: "CODN" abi_l2_cps: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: - # F (Full Disk) or C (CONUS) - - '{system_environment:2s}_{mission_id:3s}-L2-CPS{area_code:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' - # M1 or M2 for mesoscale - - '{system_environment:2s}_{mission_id:3s}-L2-CPSM{area_code:1d}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CPS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CPS" # CSPP Geo keeps Day and Night algorithm outputs separate abi_l2_cpsd: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + observation_type: "CPSD" abi_l2_cpsn: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: - '{system_environment:2s}_{mission_id:3s}-L2-CPSN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc' + observation_type: "CPSN" abi_l2_ctp: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "CTP" abi_l2_dsi: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "DSI" abi_l2_drs: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "DRS" abi_l2_fdc: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "FDC" abi_l2_fsc: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FSC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "FSC" abi_l2_lst: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "LST" abi_l2_rrqpe: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "RRQPE" abi_l2_rsr: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: 
['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "RSR" abi_l2_dsr: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "DSR" abi_l2_sst: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "SST" abi_l2_tpw: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "TPW" abi_l2_vaa: - file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 - file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc', '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc'] + file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 + file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc', '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc'] + observation_type: "VAA" # CSPP - Geo Unofficial product abi_l2_nav: file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2 file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'] + observation_type: "NAV" diff --git 
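Note: the file_patterns above use trollsift syntax; the observation_type key added to each file type is plain metadata, while the pattern drives both file matching and field extraction. A sketch with a hypothetical ACM file name (values made up for illustration):

from trollsift import Parser

pattern = ("{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_"
           "{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_"
           "c{creation_time:%Y%j%H%M%S%f}.nc")
info = Parser(pattern).parse(
    "OR_ABI-L2-ACMF-M6_G16_s20231671200205_e20231671209513_c20231671211091.nc")
# Prints the platform short name and the parsed start datetime
# (year 2023, day-of-year 167).
print(info["platform_shortname"], info["start_time"])
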
a/satpy/etc/readers/acspo.yaml b/satpy/etc/readers/acspo.yaml index d2f6e1f824..196fff942c 100644 --- a/satpy/etc/readers/acspo.yaml +++ b/satpy/etc/readers/acspo.yaml @@ -37,4 +37,3 @@ datasets: name: wind_speed coordinates: [longitude, latitude] file_type: acspo_sst - diff --git a/satpy/etc/readers/agri_l1.yaml b/satpy/etc/readers/agri_l1.yaml index c9cd5fc2a5..47dd783c4d 100755 --- a/satpy/etc/readers/agri_l1.yaml +++ b/satpy/etc/readers/agri_l1.yaml @@ -310,4 +310,4 @@ datasets: standard_name: satellite_azimuth_angle resolution: 4000 file_type: agri_l1_4000m_geo - file_key: NOMSatelliteAzimuth \ No newline at end of file + file_key: NOMSatelliteAzimuth diff --git a/satpy/etc/readers/ahi_hrit.yaml b/satpy/etc/readers/ahi_hrit.yaml index 22d45a33b8..421929f2c7 100644 --- a/satpy/etc/readers/ahi_hrit.yaml +++ b/satpy/etc/readers/ahi_hrit.yaml @@ -17,6 +17,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b01_fd: @@ -28,6 +29,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b02_fd: @@ -39,6 +41,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b03_fd: @@ -50,6 +53,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b04_fd: @@ -61,6 +65,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b05_fd: @@ -72,6 +77,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b06_fd: @@ -86,6 +92,7 @@ file_types: # https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf file_patterns: - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b07_fd: @@ -100,6 +107,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b07_ir4_fd: @@ -111,6 +119,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b08_fd: @@ -122,6 +131,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 
'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b09_fd: @@ -133,6 +143,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b10_fd: @@ -144,6 +155,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b11_fd: @@ -155,6 +167,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b12_fd: @@ -166,6 +179,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b13_fd: @@ -177,6 +191,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b14_fd: @@ -188,6 +203,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b15_fd: @@ -199,6 +215,7 @@ file_types: file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler file_patterns: - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}' + - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2' expected_segments: 10 hrit_b16_fd: diff --git a/satpy/etc/readers/ami_l1b.yaml b/satpy/etc/readers/ami_l1b.yaml index 952fea35f7..36692df180 100644 --- a/satpy/etc/readers/ami_l1b.yaml +++ b/satpy/etc/readers/ami_l1b.yaml @@ -338,4 +338,3 @@ datasets: units: K file_type: ir133 file_key: image_pixel_values - diff --git a/satpy/etc/readers/amsub_l1c_aapp.yaml b/satpy/etc/readers/amsub_l1c_aapp.yaml new file mode 100644 index 0000000000..009c935189 --- /dev/null +++ b/satpy/etc/readers/amsub_l1c_aapp.yaml @@ -0,0 +1,166 @@ +reader: + name: amsub_l1c_aapp + description: AAPP l1c Reader for AMSU-B data + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [amsub,] + default_channels: [16, 17, 18, 19, 20] + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyRange + resolution: + polarization: + enum: + - H + - V + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + +datasets: + '16': + name: '16' + frequency_range: + central: 89. 
+ bandwidth: 1.0 + unit: GHz + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + '17': + name: '17' + frequency_range: + central: 150. + bandwidth: 1.0 + unit: GHz + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + '18': + name: '18' + frequency_double_sideband: + unit: GHz + central: 183.31 + side: 1.0 + bandwidth: 0.5 + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + '19': + name: '19' + frequency_double_sideband: + unit: GHz + central: 183.31 + side: 3.0 + bandwidth: 1.0 + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + '20': + name: '20' + frequency_double_sideband: + unit: GHz + central: 183.31 + side: 7.0 + bandwidth: 2.0 + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + + solar_zenith_angle: + name: solar_zenith_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + standard_name: solar_zenith_angle + units: degrees + + solar_azimuth_angle: + name: solar_azimuth_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + standard_name: solar_azimuth_angle + units: degrees + + sensor_zenith_angle: + name: sensor_zenith_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + standard_name: sensor_zenith_angle + units: degrees + + sensor_azimuth_angle: + name: sensor_azimuth_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: amsub_aapp_l1c + standard_name: sensor_azimuth_angle + units: degrees + + latitude: + name: latitude + resolution: 16000 + file_type: amsub_aapp_l1c + standard_name: latitude + units: degrees_north + + longitude: + name: longitude + resolution: 16000 + file_type: amsub_aapp_l1c + standard_name: longitude + units: degrees_east + +file_types: + amsub_aapp_l1c: + file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile + file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'] diff --git a/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml b/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml index 22aada9fe2..1e1d8653c1 100644 --- a/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml +++ b/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml @@ -11,7 +11,7 @@ reader: file_types: ascat_l2_soilmoisture_bufr: file_reader: !!python/name:satpy.readers.ascat_l2_soilmoisture_bufr.AscatSoilMoistureBufr - file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_EUMC_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_{level}.bin"] + file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_{header}_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_ssm_l2.bin"] datasets: @@ -49,4 +49,3 @@ datasets: coordinates: [longitude, latitude] key: soilMoistureQuality fill_value: 
-1.e+100 - diff --git a/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml b/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml index 5403e4a64f..bdf14f2799 100644 --- a/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml +++ b/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml @@ -202,4 +202,4 @@ datasets: equator_crossing_time: name: equator_crossing_time file_type: eumetsat_gac_fdr - nc_key: 'equator_crossing_time' \ No newline at end of file + nc_key: 'equator_crossing_time' diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml index 1e80c4aed8..611c73be1b 100644 --- a/satpy/etc/readers/fci_l1c_nc.yaml +++ b/satpy/etc/readers/fci_l1c_nc.yaml @@ -6,13 +6,11 @@ reader: Reader for FCI L1c data in NetCDF4 format. Used to read Meteosat Third Generation (MTG) Flexible Combined Imager (FCI) L1c data. - reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader + reader: !!python/name:satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader sensors: [ fci ] # Source: MTG FCI L1 Product User Guide [FCIL1PUG] -# ftp://ftp.eumetsat.int/pub/OPS/out/test-data/Test-data-for-External-Users/MTG/MTG_FCI_L1C_Enhanced_and_Non-Nominal_Test_Data/PDF_MTG_FCI_L1_PUG.pdf -# and Example Products for Pytroll Workshop Package Description, -# EUM/MTG/DOC/19/1079228 +# https://www.eumetsat.int/media/45923 file_types: fci_l1c_fdhsi: file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler @@ -30,12 +28,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_05: @@ -47,12 +45,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_06: @@ -64,12 +62,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_08: @@ -81,12 +79,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_09: @@ -98,12 +96,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi nir_13: @@ -115,12 +113,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: 
- standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi nir_16: @@ -132,12 +130,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi nir_22: @@ -149,12 +147,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 reflectance: standard_name: toa_bidirectional_reflectance units: "%" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_38: @@ -166,12 +164,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi wv_63: @@ -183,12 +181,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi wv_73: @@ -200,12 +198,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_87: @@ -217,12 +215,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_97: @@ -234,12 +232,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_105: @@ -251,12 +249,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_123: @@ -268,12 +266,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi ir_133: @@ 
-285,12 +283,12 @@ datasets: counts: standard_name: counts units: "count" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 brightness_temperature: standard_name: toa_brightness_temperature units: "K" - radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 file_type: fci_l1c_fdhsi vis_04_pixel_quality: diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index ed7e8a9325..4650c79c01 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -4,1047 +4,2731 @@ reader: long_name: MTG FCI L2 (NetCDF4) description: Reader for EUMETSAT MTG FCI L2 files in NetCDF4 format. sensors: [fci] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: - # Filename examples - # FCI_SIM_OCA_2L_2KM_{creation_time:%Y%m%d}_1700.nc - # W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-2-ASR--FD------NC4E_C_EUMT_20201105031219_L2PF_DEV_20170410171000_20170410172000_N__T_0104_0000.nc + nc_fci_clm: + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' - nc_fci_oca: + nc_fci_ct: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler - file_patterns: ['FCI_SIM_OCA_2L_2KM_{creation_time:%Y%m%d}_1700.nc', - 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-OCA--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc'] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CT-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' - nc_fci_clm: + nc_fci_ctth: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler - file_patterns: ['FCI_SIM_CLM_2KM_{creation_time:%Y%m%d}_1700.nc', - 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CLM--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc'] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CTTH-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' - nc_fci_test_clm: + nc_fci_oca: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler - file_patterns: [ 
'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CLMTest-{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc' ] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-OCA-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' - nc_fci_ct: + nc_fci_fir: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler - file_patterns: ['W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CT--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc'] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-FIR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' - nc_fci_cloud: + nc_fci_olr: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler - file_patterns: ['W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CTTH--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc' ] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-OLR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' - nc_fci_asr: - file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler - file_patterns: [ "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-ASR--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc"] + nc_fci_crm: + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CRM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_gii: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler - file_patterns: 
["W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-GII--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc"] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-GII-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' nc_fci_toz: file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler - file_patterns: [ "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-TOZ--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc" ] + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-TOZ-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + + nc_fci_test_clm: + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-PAD-CLMTest-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' + + nc_fci_asr: + file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' datasets: +# CLM + cloud_state: + name: cloud_state + resolution: 2000 + file_type: nc_fci_clm + file_key: cloud_state + long_name: cloud_mask_classification + + quality_illumination_clm: + name: quality_illumination_clm + resolution: 2000 + file_type: nc_fci_clm + file_key: quality_illumination + long_name: illumination_classification + + quality_nwp_parameters_clm: + name: quality_nwp_parameters_clm + resolution: 2000 + file_type: nc_fci_clm + file_key: quality_nwp_parameters + long_name: quality_index + + quality_MTG_parameters_clm: + name: quality_MTG_parameters_clm + resolution: 2000 + file_type: nc_fci_clm + file_key: quality_MTG_parameters + long_name: quality_index + fill_value: -127 + + quality_overall_processing_clm: + name: quality_overall_processing_clm + resolution: 2000 + file_type: nc_fci_clm + file_key: quality_overall_processing + long_name: quality_index + + product_quality_clm: + name: product_quality_clm + file_type: 
nc_fci_clm + file_key: product_quality + long_name: product_quality_index + + product_completeness_clm: + name: product_completeness_clm + file_type: nc_fci_clm + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_clm: + name: product_timeliness_clm + file_type: nc_fci_clm + file_key: product_timeliness + long_name: product_timeliness_index + +# FCI CT L2 + cloud_phase: + name: cloud_phase + resolution: 2000 + file_type: nc_fci_ct + file_key: cloud_phase + long_name: cloud_phase + + cloud_type: + name: cloud_type + resolution: 2000 + file_type: nc_fci_ct + file_key: cloud_type + long_name: cloud_type + + quality_illumination_ct: + name: quality_illumination_ct + resolution: 2000 + file_type: nc_fci_ct + file_key: quality_illumination + long_name: illumination_classification + + quality_nwp_parameters_ct: + name: quality_nwp_parameters_ct + resolution: 2000 + file_type: nc_fci_ct + file_key: quality_nwp_parameters + long_name: quality_index + + quality_MTG_parameters_ct: + name: quality_MTG_parameters_ct + resolution: 2000 + file_type: nc_fci_ct + file_key: quality_MTG_parameters + long_name: quality_index + + quality_overall_processing_ct: + name: quality_overall_processing_ct + resolution: 2000 + file_type: nc_fci_ct + file_key: quality_overall_processing + long_name: quality_index + + product_quality_ct: + name: product_quality_ct + file_type: nc_fci_ct + file_key: product_quality + long_name: product_quality_index + + product_completeness_ct: + name: product_completeness_ct + file_type: nc_fci_ct + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_ct: + name: product_timeliness_ct + file_type: nc_fci_ct + file_key: product_timeliness + long_name: product_timeliness_index + + # FCI CTTH Product + cloud_top_aviation_height: + name: cloud_top_aviation_height + resolution: 2000 + file_type: nc_fci_ctth + file_key: cloud_top_aviation_height + + cloud_top_height: + name: cloud_top_height + resolution: 2000 + file_type: nc_fci_ctth + file_key: cloud_top_height + fill_value: 32769 + + cloud_top_pressure: + name: cloud_top_pressure + resolution: 2000 + file_type: nc_fci_ctth + file_key: cloud_top_pressure + fill_value: 3276.9001 + + cloud_top_temperature: + name: cloud_top_temperature + resolution: 2000 + file_type: nc_fci_ctth + file_key: cloud_top_temperature + fill_value: 327.69 + + effective_cloudiness: + name: effective_cloudiness + resolution: 2000 + file_type: nc_fci_ctth + file_key: effective_cloudiness + + quality_status_ctth: + name: quality_status_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: quality_status + + quality_rtm_ctth: + name: quality_rtm_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: quality_rtm + + quality_method_ctth: + name: quality_method_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: quality_method + + quality_nwp_parameters_ctth: + name: quality_nwp_parameters_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: quality_nwp_parameters + + quality_MTG_parameters_ctth: + name: quality_MTG_parameters_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: quality_MTG_parameters + fill_value: -127 + + quality_overall_processing_ctth: + name: quality_overall_processing_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: quality_overall_processing + + quality_overall_processing_aviation_ctth: + name: quality_overall_processing_aviation_ctth + resolution: 2000 + file_type: nc_fci_ctth + file_key: 
quality_overall_processing_aviation + + product_quality_ctth: + name: product_quality_ctth + file_type: nc_fci_ctth + file_key: product_quality + long_name: product_quality_index + + product_completeness_ctth: + name: product_completeness_ctth + file_type: nc_fci_ctth + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_ctth: + name: product_timeliness_ctth + file_type: nc_fci_ctth + file_key: product_timeliness + long_name: product_timeliness_index + + # OCA retrieved_cloud_phase: name: retrieved_cloud_phase + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_phase standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top - fill_value: -999. + + retrieved_cloud_optical_thickness: + name: retrieved_cloud_optical_thickness + resolution: 2000 + file_type: nc_fci_oca + file_key: retrieved_cloud_optical_thickness + long_name: cloud_optical_depth retrieved_cloud_optical_thickness_upper_layer: name: retrieved_cloud_optical_thickness_upper_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 0 - standard_name: cloud_optical_depth - fill_value: -999. + long_name: cloud_optical_depth retrieved_cloud_optical_thickness_lower_layer: name: retrieved_cloud_optical_thickness_lower_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_optical_thickness layer: 1 - standard_name: cloud_optical_depth - fill_value: -999. + long_name: cloud_optical_depth retrieved_cloud_particle_effective_radius: name: retrieved_cloud_particle_effective_radius + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_particle_effective_radius standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top - fill_value: -999. retrieved_cloud_top_temperature: name: retrieved_cloud_top_temperature + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_temperature standard_name: air_temperature_at_cloud_top - fill_value: -999. retrieved_cloud_top_pressure_upper_layer: name: retrieved_cloud_top_pressure_upper_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 0 standard_name: air_pressure_at_cloud_top - fill_value: -999. retrieved_cloud_top_pressure_lower_layer: name: retrieved_cloud_top_pressure_lower_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_pressure layer: 1 standard_name: air_pressure_at_cloud_top - fill_value: -999. retrieved_cloud_top_height: name: retrieved_cloud_top_height + resolution: 2000 file_type: nc_fci_oca file_key: retrieved_cloud_top_height standard_name: height_at_cloud_top - fill_value: -999. retrieval_error_cloud_optical_thickness_upper_layer: name: retrieval_error_cloud_optical_thickness_upper_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 0 - standard_name: cloud_optical_depth - fill_value: -999. + long_name: cloud_optical_depth retrieval_error_cloud_optical_thickness_lower_layer: name: retrieval_error_cloud_optical_thickness_lower_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_optical_thickness layer: 1 - standard_name: cloud_optical_depth - fill_value: -999. 
+ long_name: cloud_optical_depth retrieval_error_cloud_particle_effective_radius: name: retrieval_error_cloud_particle_effective_radius + resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_particle_effective_radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top - fill_value: -999. + standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top_standard_error retrieval_error_cloud_top_pressure_upper_layer: name: retrieval_error_cloud_top_pressure_upper_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 0 - standard_name: air_pressure_at_cloud_top - fill_value: -999. + standard_name: air_pressure_at_cloud_top_standard_error retrieval_error_cloud_top_pressure_lower_layer: name: retrieval_error_cloud_top_pressure_lower_layer + resolution: 2000 file_type: nc_fci_oca file_key: retrieval_error_cloud_top_pressure layer: 1 - standard_name: air_pressure_at_cloud_top - fill_value: -999. + standard_name: air_pressure_at_cloud_top_standard_error quality_jmeas: name: quality_jmeas + resolution: 2000 file_type: nc_fci_oca file_key: quality_jmeas - standard_name: cost_function - fill_value: -999. - - cloud_state: - name: cloud_state - file_type: nc_fci_clm - file_key: cloud_state - standard_name: cloud_mask_classification - fill_value: -999 - mask_value: 0 + long_name: cost_function - quality_illumination: - name: quality_illumination - file_type: nc_fci_clm - file_key: quality_illumination - standard_name: illumination_classification - fill_value: -999 - mask_value: 0 + product_quality_oca: + name: product_quality_oca + file_type: nc_fci_oca + file_key: product_quality + long_name: product_quality_index - quality_nwp_parameters: - name: quality_nwp_parameters - file_type: nc_fci_clm - file_key: quality_nwp_parameters - standard_name: quality_index - fill_value: -999 - mask_value: 0 + product_completeness_oca: + name: product_completeness_oca + file_type: nc_fci_oca + file_key: product_completeness + long_name: product_completeness_index - quality_MTG_parameters: - name: quality_MTG_parameters - file_type: nc_fci_clm - file_key: quality_MTG_parameters - standard_name: quality_index - fill_value: -999 - mask_value: 0 + product_timeliness_oca: + name: product_timeliness_oca + file_type: nc_fci_oca + file_key: product_timeliness + long_name: product_timeliness_index + + # FIR + fire_probability: + name: fire_probability + resolution: 2000 + file_type: nc_fci_fir + file_key: fire_probability + + fire_result: + name: fire_result + resolution: 2000 + file_type: nc_fci_fir + file_key: fire_result + + product_quality_fir: + name: product_quality_fir + file_type: nc_fci_fir + file_key: product_quality + long_name: product_quality_index + + product_completeness_fir: + name: product_completeness_fir + file_type: nc_fci_fir + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_fir: + name: product_timeliness_fir + file_type: nc_fci_fir + file_key: product_timeliness + long_name: product_timeliness_index + + # OLR + olr: + name: olr + resolution: 2000 + file_type: nc_fci_olr + file_key: olr_value + long_name: outgoing_longwave_radiation + + cloud_type_olr: + name: cloud_type_olr + resolution: 2000 + file_type: nc_fci_olr + file_key: cloud_type + long_name: cloud_type_olr - quality_overall_processing: - name: quality_overall_processing - file_type: nc_fci_clm + quality_overall_processing_olr: + name: quality_overall_processing_olr + resolution: 2000 + 
file_type: nc_fci_olr file_key: quality_overall_processing - standard_name: quality_index - fill_value: -999 - mask_value: 0 + long_name: quality_index + + product_quality_olr: + name: product_quality_olr + file_type: nc_fci_olr + file_key: product_quality + long_name: product_quality_index + + product_completeness_olr: + name: product_completeness_olr + file_type: nc_fci_olr + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_olr: + name: product_timeliness_olr + file_type: nc_fci_olr + file_key: product_timeliness + long_name: product_timeliness_index + + # CRM + crm: + name: crm + resolution: 1000 + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance + + crm_vis04: + name: crm_vis04 + resolution: 1000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_vis04 + vis_channel_id: 0 + + crm_vis05: + name: crm_vis05 + resolution: 1000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_vis05 + vis_channel_id: 1 + + crm_vis06: + name: crm_vis06 + resolution: 1000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_vis06 + vis_channel_id: 2 + + crm_vis08: + name: crm_vis08 + resolution: 1000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_vis08 + vis_channel_id: 3 + + crm_vis09: + name: crm_vis09 + resolution: 1000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_vis09 + vis_channel_id: 4 + + crm_nir13: + name: crm_nir13 + resolution: 1000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_nir13 + vis_channel_id: 5 + + crm_nir16: + name: crm_nir16 + resolution: 1000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_nir16 + vis_channel_id: 6 + + crm_nir22: + name: crm_nir22 + resolution: 1000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_crm + file_key: mean_clear_sky_reflectance + long_name: mean_clear_sky_reflectance_nir22 + vis_channel_id: 7 + + mean_sza: + name: mean_sza + resolution: 1000 + file_type: nc_fci_crm + file_key: mean_solar_zenith + long_name: mean_solar_zenith_angle + + mean_rel_azi: + name: mean_rel_azi + resolution: 1000 + file_type: nc_fci_crm + file_key: mean_rel_solar_sat_azimuth + long_name: mean_relative_solar_satellite_azimuth_angle + + n_acc: + name: n_acc + resolution: 1000 + file_type: nc_fci_crm + file_key: number_of_accumulations + long_name: number_of_accumulations + + historical_data: + name: historical_data + resolution: 1000 + file_type: nc_fci_crm + file_key: historical_data + long_name: historical_data + + product_quality_crm: + name: product_quality_crm + file_type: nc_fci_crm + file_key: product_quality + long_name: product_quality_index + + product_completeness_crm: + name: product_completeness_crm + file_type: nc_fci_crm + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_crm: + name: product_timeliness_crm + file_type: nc_fci_crm + file_key: product_timeliness + long_name: product_timeliness_index + + + # LAT/LON FOR 
SEGMENTED PRODUCTS + latitude: + name: latitude + file_key: latitude + resolution: [6000, 6000, 32000] + file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] + standard_name: latitude + units: degree_north + + longitude: + name: longitude + file_key: longitude + resolution: [6000, 6000, 32000] + file_type: [ nc_fci_gii, nc_fci_toz, nc_fci_asr ] + standard_name: longitude + units: degree_east -# CLM Test + + # GII + k_index: + name: k_index + resolution: 6000 + file_type: nc_fci_gii + file_key: k_index + long_name: k_index + coordinates: + - longitude + - latitude + + lifted_index: + name: lifted_index + resolution: 6000 + file_type: nc_fci_gii + file_key: lifted_index + long_name: lifted_index + coordinates: + - longitude + - latitude + + prec_water_high: + name: prec_water_high + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_high + long_name: prec_water_high + coordinates: + - longitude + - latitude + + prec_water_low: + name: prec_water_low + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_low + long_name: prec_water_low + coordinates: + - longitude + - latitude + + prec_water_mid: + name: prec_water_mid + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_mid + long_name: prec_water_mid + coordinates: + - longitude + - latitude + + prec_water_total: + name: prec_water_total + resolution: 6000 + file_type: nc_fci_gii + file_key: prec_water_total + long_name: prec_water_total + coordinates: + - longitude + - latitude + + percent_cloud_free_gii: + name: percent_cloud_free_gii + resolution: 6000 + file_type: nc_fci_gii + file_key: percent_cloud_free + long_name: percent_cloud_free + coordinates: + - longitude + - latitude + + number_of_iterations_gii: + name: number_of_iterations_gii + resolution: 6000 + file_type: nc_fci_gii + file_key: number_of_iterations + long_name: number_of_iterations + coordinates: + - longitude + - latitude + + product_quality_gii: + name: product_quality_gii + file_type: nc_fci_gii + file_key: product_quality + long_name: product_quality_index + + product_completeness_gii: + name: product_completeness_gii + file_type: nc_fci_gii + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_gii: + name: product_timeliness_gii + file_type: nc_fci_gii + file_key: product_timeliness + long_name: product_timeliness_index + + +# TOZ + total_ozone: + name: total_ozone + resolution: 6000 + file_type: nc_fci_toz + file_key: total_ozone + long_name: total_ozone + coordinates: + - longitude + - latitude + + percent_pixels_toz: + name: percent_pixels_toz + resolution: 6000 + file_type: nc_fci_toz + file_key: percent_pixels + long_name: percent_pixels + coordinates: + - longitude + - latitude + + number_of_iterations_toz: + name: number_of_iterations_toz + resolution: 6000 + file_type: nc_fci_toz + file_key: number_of_iterations + long_name: number_of_iterations + coordinates: + - longitude + - latitude + + retrieval_type_toz: + name: retrieval_type_toz + resolution: 6000 + file_type: nc_fci_toz + file_key: retrieval_type + long_name: retrieval_type + coordinates: + - longitude + - latitude + + product_quality_toz: + name: product_quality_toz + file_type: nc_fci_toz + file_key: product_quality + long_name: product_quality_index + + product_completeness_toz: + name: product_completeness_toz + file_type: nc_fci_toz + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_toz: + name: product_timeliness_toz + file_type: nc_fci_toz + file_key: product_timeliness + 
long_name: product_timeliness_index + + + + # CLM Test cloud_test_sit1_flag: name: cloud_test_sit1_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_sit1_flag + long_name: cloud_mask_test_sit1_flag extract_byte: 0 - fill_value: -999 - mask_value: 0 cloud_test_cmt1_flag: name: cloud_test_cmt1_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt1_flag + long_name: cloud_mask_test_cmt1_flag extract_byte: 1 - fill_value: -999 - mask_value: 0 cloud_test_cmt2_flag: name: cloud_test_cmt2_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt2_flag + long_name: cloud_mask_test_cmt2_flag extract_byte: 2 - fill_value: -999 - mask_value: 0 cloud_test_cmt3_flag: name: cloud_test_cmt3_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt3_flag + long_name: cloud_mask_test_cmt3_flag extract_byte: 3 - fill_value: -999 - mask_value: 0 cloud_test_cmt4_flag: name: cloud_test_cmt4_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt4_flag + long_name: cloud_mask_test_cmt4_flag extract_byte: 4 - fill_value: -999 - mask_value: 0 cloud_test_cmt5_flag: name: cloud_test_cmt5_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt5_flag + long_name: cloud_mask_test_cmt5_flag extract_byte: 5 - fill_value: -999 - mask_value: 0 cloud_test_cmt6_flag: name: cloud_test_cmt6_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt6_flag + long_name: cloud_mask_test_cmt6_flag extract_byte: 6 - fill_value: -999 - mask_value: 0 cloud_test_cmt7_flag: name: cloud_test_cmt7_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt7_flag + long_name: cloud_mask_test_cmt7_flag extract_byte: 7 - fill_value: -999 - mask_value: 0 cloud_test_cmt8_flag: name: cloud_test_cmt8_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt8_flag + long_name: cloud_mask_test_cmt8_flag extract_byte: 8 - fill_value: -999 - mask_value: 0 cloud_test_cmt9_flag: name: cloud_test_cmt9_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt9_flag + long_name: cloud_mask_test_cmt9_flag extract_byte: 9 - fill_value: -999 - mask_value: 0 cloud_test_cmt10_flag: name: cloud_test_cmt10_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt0_flag + long_name: cloud_mask_test_cmt10_flag extract_byte: 10 - fill_value: -999 - mask_value: 0 cloud_test_cmt11_flag: name: cloud_test_cmt11_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt11_flag + long_name: cloud_mask_test_cmt11_flag extract_byte: 11 - fill_value: -999 - mask_value: 0 cloud_test_cmt12_flag: name: cloud_test_cmt12_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt12_flag + long_name: cloud_mask_test_cmt12_flag extract_byte: 12 - fill_value: -999 - mask_value: 0 cloud_test_cmt13_flag: name: cloud_test_cmt13_flag + resolution: 2000 file_type: nc_fci_test_clm file_key:
cloud_mask_test_flag - standard_name: cloud_mask_test_cmt13_flag + long_name: cloud_mask_test_cmt13_flag extract_byte: 13 - fill_value: -999 - mask_value: 0 cloud_test_cmt14_flag: name: cloud_test_cmt14_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmt14_flag + long_name: cloud_mask_test_cmt14_flag extract_byte: 14 - fill_value: -999 - mask_value: 0 cloud_test_opqt_flag: name: cloud_test_opqt_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_opqt_flag + long_name: cloud_mask_test_opqt_flag extract_byte: 15 - fill_value: -999 - mask_value: 0 cloud_test_cmrt1_flag: name: cloud_test_cmrt1_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmrt1_flag + long_name: cloud_mask_test_cmrt1_flag extract_byte: 16 - fill_value: -999 - mask_value: 0 cloud_test_cmrt2_flag: name: cloud_test_cmrt2_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmrt2_flag + long_name: cloud_mask_test_cmrt2_flag extract_byte: 17 - fill_value: -999 - mask_value: 0 cloud_test_cmrt3_flag: name: cloud_test_cmrt3_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmrt3_flag + long_name: cloud_mask_test_cmrt3_flag extract_byte: 18 - fill_value: -999 - mask_value: 0 cloud_test_cmrt4_flag: name: cloud_test_cmrt4_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmrt4_flag + long_name: cloud_mask_test_cmrt4_flag extract_byte: 19 - fill_value: -999 - mask_value: 0 cloud_test_cmrt5_flag: name: cloud_test_cmrt5_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmrt5_flag + long_name: cloud_mask_test_cmrt5_flag extract_byte: 20 - fill_value: -999 - mask_value: 0 cloud_test_cmrt6_flag: name: cloud_test_cmrt6_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_cmrt6_flag + long_name: cloud_mask_test_cmrt6_flag extract_byte: 21 - fill_value: -999 - mask_value: 0 cloud_test_dust_flag: name: cloud_test_dust_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_dust_flag + long_name: cloud_mask_test_dust_flag extract_byte: 22 - fill_value: -999 - mask_value: 0 cloud_test_ash_flag: name: cloud_test_ash_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_ash_flag + long_name: cloud_mask_test_ash_flag extract_byte: 23 - fill_value: -999 - mask_value: 0 cloud_test_dust_ash_flag: name: cloud_test_dust_ash_flag + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_flag - standard_name: cloud_mask_test_dust_ash_flag + long_name: cloud_mask_test_dust_ash_flag extract_byte: 24 - fill_value: -999 - mask_value: 0 cloud_test_sit1: name: cloud_test_sit1 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_sit1 + long_name: cloud_mask_test_sit1 extract_byte: 0 - fill_value: -999 - mask_value: 0 cloud_test_cmt1: name: cloud_test_cmt1 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt1 + long_name: cloud_mask_test_cmt1 extract_byte: 1 - fill_value: -999 - mask_value: 0 
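The CLM-test datasets in this block all read the same packed test word (`cloud_mask_test_flag` or `cloud_mask_test_result`) and differ only in their `extract_byte` index. As a rough illustration of what that indexing implies, here is a minimal numpy sketch, assuming each test occupies a single bit at the given position; the function name is invented and the actual `FciL2NCFileHandler` decoding may use wider fields or a different ordering:

```python
import numpy as np

def extract_test(packed: np.ndarray, position: int) -> np.ndarray:
    """Shift-and-mask the packed CLM test word down to one test's value.

    Illustrative only: assumes one bit per test at ``position``.
    """
    return (packed >> position) & 1

packed_word = np.array([0b110, 0b001], dtype=np.uint32)
extract_test(packed_word, 0)  # -> [0 1]
extract_test(packed_word, 1)  # -> [1 0]
```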
cloud_test_cmt2: name: cloud_test_cmt2 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt2 + long_name: cloud_mask_test_cmt2 extract_byte: 2 - fill_value: -999 - mask_value: 0 cloud_test_cmt3: name: cloud_test_cmt3 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt3 + long_name: cloud_mask_test_cmt3 extract_byte: 3 - fill_value: -999 - mask_value: 0 cloud_test_cmt4: name: cloud_test_cmt4 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt4 + long_name: cloud_mask_test_cmt4 extract_byte: 4 - fill_value: -999 - mask_value: 0 cloud_test_cmt5: name: cloud_test_cmt5 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt5 + long_name: cloud_mask_test_cmt5 extract_byte: 5 - fill_value: -999 - mask_value: 0 cloud_test_cmt6: name: cloud_test_cmt6 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt6 + long_name: cloud_mask_test_cmt6 extract_byte: 6 - fill_value: -999 - mask_value: 0 cloud_test_cmt7: name: cloud_test_cmt7 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt7 + long_name: cloud_mask_test_cmt7 extract_byte: 7 - fill_value: -999 - mask_value: 0 - cloud_test_cmt8: name: cloud_test_cmt8 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt8 + long_name: cloud_mask_test_cmt8 extract_byte: 8 - fill_value: -999 - mask_value: 0 cloud_test_cmt9: name: cloud_test_cmt9 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt9 + long_name: cloud_mask_test_cmt9 extract_byte: 9 - fill_value: -999 - mask_value: 0 cloud_test_cmt10: name: cloud_test_cmt10 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt10 + long_name: cloud_mask_test_cmt10 extract_byte: 10 - fill_value: -999 - mask_value: 0 cloud_test_cmt11: name: cloud_test_cmt11 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt11 + long_name: cloud_mask_test_cmt11 extract_byte: 11 - fill_value: -999 - mask_value: 0 cloud_test_cmt12: name: cloud_test_cmt12 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt12 + long_name: cloud_mask_test_cmt12 extract_byte: 12 - fill_value: -999 - mask_value: 0 cloud_test_cmt13: name: cloud_test_cmt13 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt13 + long_name: cloud_mask_test_cmt13 extract_byte: 13 - fill_value: -999 - mask_value: 0 cloud_test_cmt14: name: cloud_test_cmt14 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmt14 + long_name: cloud_mask_test_cmt14 extract_byte: 14 - fill_value: -999 - mask_value: 0 cloud_test_opqt: name: cloud_test_opqt + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_opqt + long_name: cloud_mask_test_opqt extract_byte: 15 - fill_value: -999 - mask_value: 0 cloud_test_cmrt1: name: cloud_test_cmrt1 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: 
cloud_mask_test_cmrt1 + long_name: cloud_mask_test_cmrt1 extract_byte: 16 - fill_value: -999 - mask_value: 0 cloud_test_cmrt2: name: cloud_test_cmrt2 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmrt2 + long_name: cloud_mask_test_cmrt2 extract_byte: 17 - fill_value: -999 - mask_value: 0 cloud_test_cmrt3: name: cloud_test_cmrt3 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmrt3 + long_name: cloud_mask_test_cmrt3 extract_byte: 18 - fill_value: -999 - mask_value: 0 cloud_test_cmrt4: name: cloud_test_cmrt4 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmrt4 + long_name: cloud_mask_test_cmrt4 extract_byte: 19 - fill_value: -999 - mask_value: 0 cloud_test_cmrt5: name: cloud_test_cmrt5 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmrt5 + long_name: cloud_mask_test_cmrt5 extract_byte: 20 - fill_value: -999 - mask_value: 0 - - cloud_test_cmrt6: - name: cloud_test_cmrt6 - file_type: nc_fci_test_clm - file_key: cloud_mask_test_result - standard_name: cloud_mask_test_cmrt6 - extract_byte: 21 - fill_value: -999 - mask_value: 0 cloud_test_dust: name: cloud_test_dust + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_dust - extract_byte: 22 - fill_value: -999 - mask_value: 0 + long_name: cloud_mask_test_dust + extract_byte: 21 cloud_test_ash: name: cloud_test_ash + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_ash - extract_byte: 23 - fill_value: -999 - mask_value: 0 + long_name: cloud_mask_test_ash + extract_byte: 22 cloud_test_dust_ash: name: cloud_test_dust_ash + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_test_result - standard_name: cloud_mask_test_dust_ash - extract_byte: 24 - fill_value: -999 - mask_value: 0 + long_name: cloud_mask_test_dust_ash + extract_byte: 23 - cloud_mask_cmrt6_result: - name: cloud_mask_cmrt6_result + cloud_test_cmrt6: + name: cloud_test_cmrt6 + resolution: 2000 file_type: nc_fci_test_clm file_key: cloud_mask_cmrt6_test_result - standard_name: cloud_mask_cmrt6_result - extract_byte: 0 -# fill_value: -999 - mask_value: 0 - - latitude: - name: latitude - file_key: 'latitude' -# resolution: - file_type: [nc_fci_gii, nc_fci_asr, nc_fci_toz] - standard_name: latitude - fill_value: -32767 - mask_value: -32767 - units: degree_north + long_name: cloud_mask_cmrt6_result + product_quality_clmtest: + name: product_quality_clmtest + file_type: nc_fci_test_clm + file_key: product_quality + long_name: product_quality_index - longitude: - name: longitude - file_key: 'longitude' -# resolution: - file_type: [nc_fci_gii, nc_fci_asr, nc_fci_toz] - standard_name: longitude - fill_value: -32767 - mask_value: -32767 - units: degree_east + product_completeness_clmtest: + name: product_completeness_clmtest + file_type: nc_fci_test_clm + file_key: product_completeness + long_name: product_completeness_index + product_timeliness_clmtest: + name: product_timeliness_clmtest + file_type: nc_fci_test_clm + file_key: product_timeliness + long_name: product_timeliness_index - # GII - k_index: - name: k_index - file_type: nc_fci_gii - file_key: k_index - standard_name: k_index - fill_value: -32767 - mask_value: -32767 - coordinates: - - longitude - - latitude - lifted_index: - name: lifted_index - 
file_type: nc_fci_gii - file_key: lifted_index - standard_name: lifted_index - fill_value: -32767 - mask_value: -32767 + # ASR + bt_max: + name: bt_max + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_max + long_name: maximum_brightness_temperature_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude - percent_cloud_free: - name: percent_cloud_free - file_type: nc_fci_gii - file_key: percent_cloud_free - standard_name: percent_cloud_free - fill_value: -127 - mask_value: -127 + bt_mean: + name: bt_mean + resolution: 32000 + file_type: nc_fci_asr + file_key: bt_mean + long_name: mean_brightness_temperature_in_segment coordinates: - - longitude - - latitude - - prec_water_high: - name: prec_water_high - file_type: nc_fci_gii - file_key: prec_water_high - standard_name: prec_water_high - fill_value: 65535 - mask_value: 65535 - coordinates: - - longitude - - latitude - - prec_water_low: - name: prec_water_low - file_type: nc_fci_gii - file_key: prec_water_low - standard_name: prec_water_low - fill_value: 65535 - mask_value: 65535 - coordinates: - - longitude - - latitude - - prec_water_mid: - name: prec_water_mid - file_type: nc_fci_gii - file_key: prec_water_mid - standard_name: prec_water_mid - fill_value: 65535 - mask_value: 65535 - coordinates: - - longitude - - latitude - - prec_water_total: - name: prec_water_total - file_type: nc_fci_gii - file_key: prec_water_total - standard_name: prec_water_total - fill_value: 65535 - mask_value: 65535 - coordinates: - - longitude - - latitude - -# FCI CT L2 - cloud_phase: - name: cloud_phase - file_type: nc_fci_ct - file_key: cloud_phase -# standard_name: cloud_phase - fill_value: 0 - mask_value: 0 - - cloud_type: - name: cloud_type - file_type: nc_fci_ct - file_key: cloud_type -# standard_name: cloud_type - fill_value: 0 - mask_value: 0 - -# FCI CTTH Product - cloud_top_aviation_height: - name: cloud_top_aviation_height - file_type: nc_fci_cloud - file_key: cloud_top_aviation_height - fill_value: 0 - mask_value: 0 - - cloud_top_height: - name: cloud_top_height - file_type: nc_fci_cloud - file_key: cloud_top_height - fill_value: 0 - mask_value: 0 - - cloud_top_pressure: - name: cloud_top_pressure - file_type: nc_fci_cloud - file_key: cloud_top_pressure - fill_value: 0 - mask_value: 0 - - cloud_top_temperature: - name: cloud_top_temperature - file_type: nc_fci_cloud - file_key: cloud_top_temperature - fill_value: 0 - mask_value: 0 - - effective_cloudiness: - name: effective_cloudiness - file_type: nc_fci_cloud - file_key: effective_cloudiness - fill_value: 0 - mask_value: 0 - -# ASR - bt_max: - name: bt_max - file_type: nc_fci_asr - file_key: bt_max - standard_name: bt_max - fill_value: 65535 - mask_value: 65535 - coordinates: - - longitude - - latitude - - bt_mean: - name: bt_mean - file_type: nc_fci_asr - file_key: bt_mean - standard_name: bt_mean - fill_value: 65535 - mask_value: 65535 - coordinates: - - longitude - - latitude + - longitude + - latitude bt_min: name: bt_min + resolution: 32000 file_type: nc_fci_asr file_key: bt_min - standard_name: bt_min - fill_value: 65535 - mask_value: 65535 + long_name: minimum_brightness_temperature_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude bt_std: name: bt_std + resolution: 32000 file_type: nc_fci_asr file_key: bt_std - standard_name: bt_std - fill_value: 65535 - mask_value: 65535 + long_name: brightness_temperature_standard_deviation_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude radiance_max: name: 
radiance_max + resolution: 32000 file_type: nc_fci_asr file_key: radiance_max - standard_name: radiance_max - fill_value: 65535 - mask_value: 65535 + long_name: maximum_radiance_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude radiance_mean: name: radiance_mean + resolution: 32000 file_type: nc_fci_asr file_key: radiance_mean - standard_name: radiance_mean - fill_value: 65535 - mask_value: 65535 + long_name: mean_radiance_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude radiance_min: name: radiance_min + resolution: 32000 file_type: nc_fci_asr file_key: radiance_min - standard_name: radiance_min - fill_value: 65535 - mask_value: 65535 + long_name: minimum_radiance_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude radiance_std: name: radiance_std + resolution: 32000 file_type: nc_fci_asr file_key: radiance_std - standard_name: radiance_std - fill_value: 65535 - mask_value: 65535 + long_name: radiance_standard_deviation_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude reflectance_max: name: reflectance_max + resolution: 32000 file_type: nc_fci_asr file_key: reflectance_max - standard_name: reflectance_max - fill_value: 65535 - mask_value: 65535 + long_name: maximum_reflectance_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude reflectance_mean: name: reflectance_mean + resolution: 32000 file_type: nc_fci_asr file_key: reflectance_mean - standard_name: reflectance_mean - fill_value: 65535 - mask_value: 65535 + long_name: mean_reflectance_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude reflectance_min: name: reflectance_min + resolution: 32000 file_type: nc_fci_asr file_key: reflectance_min - standard_name: reflectance_min - fill_value: 65535 - mask_value: 65535 + long_name: minimum_reflectance_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude reflectance_std: name: reflectance_std + resolution: 32000 file_type: nc_fci_asr file_key: reflectance_std - standard_name: reflectance_std - fill_value: 65535 - mask_value: 65535 + long_name: reflectance_standard_deviation_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude quality_bt: name: quality_bt + resolution: 32000 file_type: nc_fci_asr file_key: quality_bt - standard_name: quality_bt - fill_value: 65535 - mask_value: 65535 + long_name: brightness_temperature_quality + fill_value: -1 coordinates: - - longitude - - latitude + - longitude + - latitude quality_reflectance: name: quality_reflectance + resolution: 32000 file_type: nc_fci_asr file_key: quality_reflectance - standard_name: quality_reflectance - fill_value: 65535 - mask_value: 65535 + long_name: reflectance_quality + fill_value: -1 coordinates: - - longitude - - latitude + - longitude + - latitude quality_radiance: name: quality_radiance + resolution: 32000 file_type: nc_fci_asr file_key: quality_radiance - standard_name: quality_radiance - fill_value: 65535 - mask_value: 65535 + long_name: radiance_quality + fill_value: -1 coordinates: - - longitude - - latitude + - longitude + - latitude land_pixel_percent: name: land_pixel_percent + resolution: 32000 file_type: nc_fci_asr file_key: land_pixel_percent - standard_name: land_pixel_percent - fill_value: 65535 - mask_value: 65535 + long_name: land_pixel_percentage_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude water_pixel_percent: name: water_pixel_percent + resolution: 32000 file_type: 
nc_fci_asr file_key: water_pixel_percent - standard_name: water_pixel_percent - fill_value: 65535 - mask_value: 65535 + long_name: water_pixel_percentage_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude pixel_percentage: name: pixel_percentage + resolution: 32000 file_type: nc_fci_asr file_key: pixel_percentage - standard_name: pixel_percentage - fill_value: 65535 - mask_value: 65535 + long_name: pixel_percentage_used_in_segment coordinates: - - longitude - - latitude + - longitude + - latitude - percent_pixels: - name: percent_pixels - file_type: nc_fci_toz - file_key: percent_pixels - standard_name: percent_pixels - fill_value: 65535 - mask_value: 65535 + reflectance_mean_all_vis04: + name: reflectance_mean_all_vis04 + resolution: 32000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 0 + category_id: 0 coordinates: - - longitude - - latitude + - longitude + - latitude - number_of_iterations: - name: number_of_iterations - file_type: nc_fci_toz - file_key: number_of_iterations - standard_name: number_of_iterations - fill_value: 65535 - mask_value: 65535 + reflectance_mean_clear_vis04: + name: reflectance_mean_clear_vis04 + resolution: 32000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 0 + category_id: 1 coordinates: - - longitude - - latitude + - longitude + - latitude - retrieval_type: - name: retrieval_type - file_type: nc_fci_toz - file_key: retrieval_type - standard_name: retrieval_type - fill_value: 65535 - mask_value: 65535 + reflectance_mean_cloudy_vis04: + name: reflectance_mean_cloudy_vis04 + resolution: 32000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 0 + category_id: 2 coordinates: - - longitude - - latitude + - longitude + - latitude - total_ozone: - name: total_ozone - file_type: nc_fci_toz - file_key: total_ozone - standard_name: total_ozone - fill_value: 65535 - mask_value: 65535 + reflectance_mean_all_vis05: + name: reflectance_mean_all_vis05 + resolution: 32000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 1 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_vis05: + name: reflectance_mean_clear_vis05 + resolution: 32000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 1 + category_id: 1 + coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_vis05: + name: reflectance_mean_cloudy_vis05 + resolution: 32000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 1 + category_id: 2 + coordinates: + - longitude + - latitude + + reflectance_mean_all_vis06: + name: reflectance_mean_all_vis06 + resolution: 32000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 2 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_vis06: + name: reflectance_mean_clear_vis06 + resolution: 32000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 2 + category_id: 1 
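The ASR `reflectance_mean_*` entries in this run (continuing below) all point at the same `reflectance_mean` variable and are disambiguated purely by `vis_channel_id` and `category_id`; georeferencing comes from the segment-level `longitude`/`latitude` datasets declared as coordinates rather than from a fixed grid. A minimal usage sketch, with a placeholder path standing in for a real ASR product file:

```python
from satpy import Scene

# Placeholder path: real files follow the nc_fci_asr pattern declared above.
asr_files = ["/path/to/fci_l2_asr_segment_file.nc"]

scn = Scene(reader="fci_l2_nc", filenames=asr_files)
# Clear-sky vs. cloudy segment means for the VIS 0.5 um channel.
scn.load(["reflectance_mean_clear_vis05", "reflectance_mean_cloudy_vis05"])
clear = scn["reflectance_mean_clear_vis05"]
# With lon/lat coordinates, the area should arrive as a SwathDefinition.
print(clear.attrs["area"])
```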
+ coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_vis06: + name: reflectance_mean_cloudy_vis06 + resolution: 32000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 2 + category_id: 2 + coordinates: + - longitude + - latitude + + reflectance_mean_all_vis08: + name: reflectance_mean_all_vis08 + resolution: 32000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 3 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_vis08: + name: reflectance_mean_clear_vis08 + resolution: 32000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 3 + category_id: 1 + coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_vis08: + name: reflectance_mean_cloudy_vis08 + resolution: 32000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 3 + category_id: 2 + coordinates: + - longitude + - latitude + + reflectance_mean_all_vis09: + name: reflectance_mean_all_vis09 + resolution: 32000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 4 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_vis09: + name: reflectance_mean_clear_vis09 + resolution: 32000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 4 + category_id: 1 + coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_vis09: + name: reflectance_mean_cloudy_vis09 + resolution: 32000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 4 + category_id: 2 + coordinates: + - longitude + - latitude + + reflectance_mean_all_nir13: + name: reflectance_mean_all_nir13 + resolution: 32000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 5 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_nir13: + name: reflectance_mean_clear_nir13 + resolution: 32000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 5 + category_id: 1 + coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_nir13: + name: reflectance_mean_cloudy_nir13 + resolution: 32000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 5 + category_id: 2 + coordinates: + - longitude + - latitude + + reflectance_mean_all_nir16: + name: reflectance_mean_all_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 6 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_nir16: + name: reflectance_mean_clear_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 6 + category_id: 1 + coordinates: + - longitude + - latitude + 
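The reflectance_mean_* entries in this new-file hunk are one template stamped out per (channel, cloud-category) pair: only name, wavelength, vis_channel_id and category_id vary, with category_id 0/1/2 denoting the all/clear/cloudy segment statistics. A short generator script keeps such repetitive blocks consistent; the sketch below is a maintenance aid under that reading, not code from this changeset (it assumes only PyYAML, and the channel table is copied from the entries in this diff):

    import yaml

    # Channel table copied from the dataset entries in this hunk:
    # (name suffix, vis_channel_id, [min, central, max] wavelength in um)
    VIS_CHANNELS = [
        ("vis04", 0, [0.384, 0.444, 0.504]),
        ("vis05", 1, [0.47, 0.51, 0.55]),
        ("vis06", 2, [0.59, 0.64, 0.69]),
        ("vis08", 3, [0.815, 0.865, 0.915]),
        ("vis09", 4, [0.894, 0.914, 0.934]),
        ("nir13", 5, [1.35, 1.38, 1.41]),
        ("nir16", 6, [1.56, 1.61, 1.66]),
        ("nir22", 7, [2.2, 2.25, 2.3]),
    ]
    # category_id 0/1/2 = statistics over all/clear/cloudy pixels in a segment
    CATEGORIES = [("all", 0), ("clear", 1), ("cloudy", 2)]

    entries = {}
    for suffix, chan_id, wavelength in VIS_CHANNELS:
        for cat_name, cat_id in CATEGORIES:
            name = f"reflectance_mean_{cat_name}_{suffix}"
            entries[name] = {
                "name": name,
                "resolution": 32000,
                "wavelength": wavelength,
                "file_type": "nc_fci_asr",
                "file_key": "reflectance_mean",
                "long_name": f"reflectance_mean_{cat_name}",
                "vis_channel_id": chan_id,
                "category_id": cat_id,
                "coordinates": ["longitude", "latitude"],
            }

    print(yaml.safe_dump(entries, sort_keys=False))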
+ reflectance_mean_cloudy_nir16: + name: reflectance_mean_cloudy_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 6 + category_id: 2 + coordinates: + - longitude + - latitude + + reflectance_mean_all_nir22: + name: reflectance_mean_all_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_all + vis_channel_id: 7 + category_id: 0 + coordinates: + - longitude + - latitude + + reflectance_mean_clear_nir22: + name: reflectance_mean_clear_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_clear + vis_channel_id: 7 + category_id: 1 + coordinates: + - longitude + - latitude + + reflectance_mean_cloudy_nir22: + name: reflectance_mean_cloudy_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: reflectance_mean + long_name: reflectance_mean_cloudy + vis_channel_id: 7 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_ir38: + name: bt_mean_all_ir38 + resolution: 32000 + wavelength: [3.4, 3.8, 4.2] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 0 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_ir38: + name: bt_mean_clear_ir38 + resolution: 32000 + wavelength: [3.4, 3.8, 4.2] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 0 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir38: + name: bt_mean_cloudy_ir38 + resolution: 32000 + wavelength: [3.4, 3.8, 4.2] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 0 + category_id: 2 coordinates: - - longitude - - latitude \ No newline at end of file + - longitude + - latitude + + bt_mean_all_wv63: + name: bt_mean_all_wv63 + resolution: 32000 + wavelength: [5.3, 6.3, 7.3] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 1 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_wv63: + name: bt_mean_clear_wv63 + resolution: 32000 + wavelength: [5.3, 6.3, 7.3] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 1 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_wv63: + name: bt_mean_cloudy_wv63 + resolution: 32000 + wavelength: [5.3, 6.3, 7.3] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 1 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_wv73: + name: bt_mean_all_wv73 + resolution: 32000 + wavelength: [6.85, 7.35, 7.85] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 2 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_wv73: + name: bt_mean_clear_wv73 + resolution: 32000 + wavelength: [6.85, 7.35, 7.85] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 2 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_wv73: + name: bt_mean_cloudy_wv73 + resolution: 32000 + wavelength: [6.85, 7.35, 7.85] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 2 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_ir87: + name: bt_mean_all_ir87 + resolution: 32000 + wavelength: [8.3, 8.7, 
9.1] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 3 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_ir87: + name: bt_mean_clear_ir87 + resolution: 32000 + wavelength: [8.3, 8.7, 9.1] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 3 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir87: + name: bt_mean_cloudy_ir87 + resolution: 32000 + wavelength: [8.3, 8.7, 9.1] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 3 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_ir97: + name: bt_mean_all_ir97 + resolution: 32000 + wavelength: [9.36, 9.66, 9.96] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 4 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_ir97: + name: bt_mean_clear_ir97 + resolution: 32000 + wavelength: [9.36, 9.66, 9.96] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 4 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir97: + name: bt_mean_cloudy_ir97 + resolution: 32000 + wavelength: [9.36, 9.66, 9.96] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 4 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_ir105: + name: bt_mean_all_ir105 + resolution: 32000 + wavelength: [9.8, 10.5, 11.2] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 5 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_ir105: + name: bt_mean_clear_ir105 + resolution: 32000 + wavelength: [9.8, 10.5, 11.2] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 5 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir105: + name: bt_mean_cloudy_ir105 + resolution: 32000 + wavelength: [9.8, 10.5, 11.2] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 5 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_ir123: + name: bt_mean_all_ir123 + resolution: 32000 + wavelength: [11.8, 12.3, 12.8] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 6 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_ir123: + name: bt_mean_clear_ir123 + resolution: 32000 + wavelength: [11.8, 12.3, 12.8] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 6 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir123: + name: bt_mean_cloudy_ir123 + resolution: 32000 + wavelength: [11.8, 12.3, 12.8] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 6 + category_id: 2 + coordinates: + - longitude + - latitude + + bt_mean_all_ir133: + name: bt_mean_all_ir133 + resolution: 32000 + wavelength: [12.7, 13.3, 13.9] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_all + ir_channel_id: 7 + category_id: 0 + coordinates: + - longitude + - latitude + + bt_mean_clear_ir133: + name: bt_mean_clear_ir133 + resolution: 32000 + wavelength: [12.7, 13.3, 13.9] + file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_clear + ir_channel_id: 7 + category_id: 1 + coordinates: + - longitude + - latitude + + bt_mean_cloudy_ir133: + name: bt_mean_cloudy_ir133 + resolution: 32000 + wavelength: [12.7, 13.3, 13.9] + 
file_type: nc_fci_asr + file_key: bt_mean + long_name: bt_mean_cloudy + ir_channel_id: 7 + category_id: 2 + coordinates: + - longitude + - latitude + + quality_reflectance_all_vis04: + name: quality_reflectance_all_vis04 + resolution: 32000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 0 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_vis04: + name: quality_reflectance_clear_vis04 + resolution: 32000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 0 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_vis04: + name: quality_reflectance_cloudy_vis04 + resolution: 32000 + wavelength: [0.384, 0.444, 0.504] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 0 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_vis05: + name: quality_reflectance_all_vis05 + resolution: 32000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 1 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_vis05: + name: quality_reflectance_clear_vis05 + resolution: 32000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 1 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_vis05: + name: quality_reflectance_cloudy_vis05 + resolution: 32000 + wavelength: [0.47, 0.51, 0.55] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 1 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_vis06: + name: quality_reflectance_all_vis06 + resolution: 32000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 2 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_vis06: + name: quality_reflectance_clear_vis06 + resolution: 32000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 2 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_vis06: + name: quality_reflectance_cloudy_vis06 + resolution: 32000 + wavelength: [0.59, 0.64, 0.69] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 2 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_vis08: + name: quality_reflectance_all_vis08 + resolution: 32000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 3 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_vis08: + name: quality_reflectance_clear_vis08 + resolution: 32000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_asr + file_key: quality_reflectance + 
long_name: quality_reflectance_clear + vis_channel_id: 3 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_vis08: + name: quality_reflectance_cloudy_vis08 + resolution: 32000 + wavelength: [0.815, 0.865, 0.915] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 3 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_vis09: + name: quality_reflectance_all_vis09 + resolution: 32000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 4 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_vis09: + name: quality_reflectance_clear_vis09 + resolution: 32000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 4 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_vis09: + name: quality_reflectance_cloudy_vis09 + resolution: 32000 + wavelength: [0.894, 0.914, 0.934] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 4 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_nir13: + name: quality_reflectance_all_nir13 + resolution: 32000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 5 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_nir13: + name: quality_reflectance_clear_nir13 + resolution: 32000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 5 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_nir13: + name: quality_reflectance_cloudy_nir13 + resolution: 32000 + wavelength: [1.35, 1.38, 1.41] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 5 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_nir16: + name: quality_reflectance_all_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_all + vis_channel_id: 6 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_nir16: + name: quality_reflectance_clear_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 6 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_nir16: + name: quality_reflectance_cloudy_nir16 + resolution: 32000 + wavelength: [1.56, 1.61, 1.66] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 6 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_all_nir22: + name: quality_reflectance_all_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: 
quality_reflectance_all + vis_channel_id: 7 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_clear_nir22: + name: quality_reflectance_clear_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_clear + vis_channel_id: 7 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_reflectance_cloudy_nir22: + name: quality_reflectance_cloudy_nir22 + resolution: 32000 + wavelength: [2.2, 2.25, 2.3] + file_type: nc_fci_asr + file_key: quality_reflectance + long_name: quality_reflectance_cloudy + vis_channel_id: 7 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_ir38: + name: quality_bt_all_ir38 + resolution: 32000 + wavelength: [3.4, 3.8, 4.2] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 0 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_ir38: + name: quality_bt_clear_ir38 + resolution: 32000 + wavelength: [3.4, 3.8, 4.2] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 0 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_ir38: + name: quality_bt_cloudy_ir38 + resolution: 32000 + wavelength: [3.4, 3.8, 4.2] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 0 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_wv63: + name: quality_bt_all_wv63 + resolution: 32000 + wavelength: [5.3, 6.3, 7.3] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 1 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_wv63: + name: quality_bt_clear_wv63 + resolution: 32000 + wavelength: [5.3, 6.3, 7.3] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 1 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_wv63: + name: quality_bt_cloudy_wv63 + resolution: 32000 + wavelength: [5.3, 6.3, 7.3] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 1 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_wv73: + name: quality_bt_all_wv73 + resolution: 32000 + wavelength: [6.85, 7.35, 7.85] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 2 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_wv73: + name: quality_bt_clear_wv73 + resolution: 32000 + wavelength: [6.85, 7.35, 7.85] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 2 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_wv73: + name: quality_bt_cloudy_wv73 + resolution: 32000 + wavelength: [6.85, 7.35, 7.85] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 2 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_ir87: + name: quality_bt_all_ir87 + resolution: 32000 + wavelength: [8.3, 8.7, 9.1] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 3 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + 
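Every quality_reflectance_* and quality_bt_* entry points at the same file variable through file_key, differing only in the channel id, the category_id, and the declared fill_value of -1, so the file handler can presumably select one slice per requested dataset and mask the fill value. A rough, self-contained illustration of that selection logic follows; the array shape and dimension names are invented for the example and are not taken from the FCI ASR files:

    import numpy as np
    import xarray as xr

    # Toy stand-in for the shared "quality_bt" variable: one array indexed
    # by IR channel and cloud category (dimension names invented here).
    quality_bt = xr.DataArray(
        np.random.randint(-1, 101, size=(8, 3, 70, 70)).astype("float32"),
        dims=("ir_channel", "category", "y", "x"),
    )

    def select_dataset(var, channel_id, category_id, fill_value=-1):
        """Pick one YAML dataset's slice and mask its declared fill value."""
        data = var.isel(ir_channel=channel_id, category=category_id)
        return data.where(data != fill_value)

    # quality_bt_clear_ir87 -> ir_channel_id: 3, category_id: 1, fill_value: -1
    qbt_clear_ir87 = select_dataset(quality_bt, channel_id=3, category_id=1)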
quality_bt_clear_ir87: + name: quality_bt_clear_ir87 + resolution: 32000 + wavelength: [8.3, 8.7, 9.1] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 3 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_ir87: + name: quality_bt_cloudy_ir87 + resolution: 32000 + wavelength: [8.3, 8.7, 9.1] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 3 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_ir97: + name: quality_bt_all_ir97 + resolution: 32000 + wavelength: [9.36, 9.66, 9.96] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 4 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_ir97: + name: quality_bt_clear_ir97 + resolution: 32000 + wavelength: [9.36, 9.66, 9.96] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 4 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_ir97: + name: quality_bt_cloudy_ir97 + resolution: 32000 + wavelength: [9.36, 9.66, 9.96] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 4 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_ir105: + name: quality_bt_all_ir105 + resolution: 32000 + wavelength: [9.8, 10.5, 11.2] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 5 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_ir105: + name: quality_bt_clear_ir105 + resolution: 32000 + wavelength: [9.8, 10.5, 11.2] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 5 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_ir105: + name: quality_bt_cloudy_ir105 + resolution: 32000 + wavelength: [9.8, 10.5, 11.2] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 5 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_ir123: + name: quality_bt_all_ir123 + resolution: 32000 + wavelength: [11.8, 12.3, 12.8] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 6 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_ir123: + name: quality_bt_clear_ir123 + resolution: 32000 + wavelength: [11.8, 12.3, 12.8] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_clear + ir_channel_id: 6 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_ir123: + name: quality_bt_cloudy_ir123 + resolution: 32000 + wavelength: [11.8, 12.3, 12.8] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 6 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_all_ir133: + name: quality_bt_all_ir133 + resolution: 32000 + wavelength: [12.7, 13.3, 13.9] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_all + ir_channel_id: 7 + category_id: 0 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_clear_ir133: + name: quality_bt_clear_ir133 + resolution: 32000 + wavelength: [12.7, 13.3, 13.9] + file_type: nc_fci_asr + file_key: quality_bt + long_name: 
quality_bt_clear + ir_channel_id: 7 + category_id: 1 + fill_value: -1 + coordinates: + - longitude + - latitude + + quality_bt_cloudy_ir133: + name: quality_bt_cloudy_ir133 + resolution: 32000 + wavelength: [12.7, 13.3, 13.9] + file_type: nc_fci_asr + file_key: quality_bt + long_name: quality_bt_cloudy + ir_channel_id: 7 + category_id: 2 + fill_value: -1 + coordinates: + - longitude + - latitude + + pixel_percentage_all: + name: pixel_percentage_all + resolution: 32000 + file_type: nc_fci_asr + file_key: pixel_percentage + long_name: pixel_percentage_all + category_id: 0 + coordinates: + - longitude + - latitude + + pixel_percentage_clear: + name: pixel_percentage_clear + resolution: 32000 + file_type: nc_fci_asr + file_key: pixel_percentage + long_name: pixel_percentage_clear + category_id: 1 + coordinates: + - longitude + - latitude + + pixel_percentage_cloudy: + name: pixel_percentage_cloudy + resolution: 32000 + file_type: nc_fci_asr + file_key: pixel_percentage + long_name: pixel_percentage_cloudy + category_id: 2 + coordinates: + - longitude + - latitude + + product_quality_asr: + name: product_quality_asr + file_type: nc_fci_asr + file_key: product_quality + long_name: product_quality_index + + product_completeness_asr: + name: product_completeness_asr + file_type: nc_fci_asr + file_key: product_completeness + long_name: product_completeness_index + + product_timeliness_asr: + name: product_timeliness_asr + file_type: nc_fci_asr + file_key: product_timeliness + long_name: product_timeliness_index diff --git a/satpy/etc/readers/geocat.yaml b/satpy/etc/readers/geocat.yaml index 66e80b5711..d0a44e965c 100644 --- a/satpy/etc/readers/geocat.yaml +++ b/satpy/etc/readers/geocat.yaml @@ -234,4 +234,4 @@ datasets: # radiance: # standard_name: toa_outgoing_radiance_per_unit_wavelength # units: W m-2 um-1 sr-1 - file_type: ahi_level1 \ No newline at end of file + file_type: ahi_level1 diff --git a/satpy/etc/readers/ghrsst_l2.yaml b/satpy/etc/readers/ghrsst_l2.yaml new file mode 100644 index 0000000000..0fbcf3a4b6 --- /dev/null +++ b/satpy/etc/readers/ghrsst_l2.yaml @@ -0,0 +1,97 @@ +reader: + description: NC Reader for GHRSST Level 2 data + name: ghrsst_l2 + sensors: ['slstr', 'avhrr/3', 'viirs'] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + GHRSST_OSISAF: + file_reader: !!python/name:satpy.readers.ghrsst_l2.GHRSSTL2FileHandler + # S-OSI_-FRA_-NPP_-NARSST_FIELD-202010141300Z.nc + file_patterns: ['S-OSI_-{generating_centre:4s}-{satid:s}-{field_type:s}_FIELD-{valid_time:%Y%m%d%H%M}Z.nc'] + + SLSTR: + file_reader: !!python/name:satpy.readers.ghrsst_l2.GHRSSTL2FileHandler + file_patterns: ['{dt1:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{dt2:%Y%m%d%H%M%S}-{version}.nc', + '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar'] + +datasets: + # SLSTR SST and Sea Ice products + longitude_slstr: + name: longitude_slstr + resolution: 1000 + view: nadir + file_type: SLSTR + standard_name: lon + units: degree + + latitude_slstr: + name: latitude_slstr + resolution: 1000 + view: nadir + file_type: SLSTR + standard_name: lat + units: degree + + sea_surface_temperature_slstr: + name: sea_surface_temperature + sensor: slstr + coordinates: [longitude_slstr, latitude_slstr] + file_type: SLSTR + resolution: 1000 + 
view: nadir + units: kelvin + standard_name: sea_surface_temperature + + sea_ice_fraction_slstr: + name: sea_ice_fraction + sensor: slstr + coordinates: [longitude_slstr, latitude_slstr] + file_type: SLSTR + resolution: 1000 + view: nadir + units: "%" + standard_name: sea_ice_fraction + + # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best + quality_level_slstr: + name: quality_level + sensor: slstr + coordinates: [longitude_slstr, latitude_slstr] + file_type: SLSTR + resolution: 1000 + view: nadir + standard_name: quality_level + + + # OSISAF SST: + longitude_osisaf: + name: longitude_osisaf + resolution: 2000 + file_type: GHRSST_OSISAF + standard_name: lon + units: degree + + latitude_osisaf: + name: latitude_osisaf + resolution: 2000 + file_type: GHRSST_OSISAF + standard_name: lat + units: degree + + sea_surface_temperature_osisaf: + name: sea_surface_temperature + coordinates: [longitude_osisaf, latitude_osisaf] + file_type: GHRSST_OSISAF + resolution: 2000 + units: kelvin + standard_name: sea_surface_temperature + + sea_ice_fraction_osisaf: + name: sea_ice_fraction + coordinates: [longitude_osisaf, latitude_osisaf] + file_type: GHRSST_OSISAF + resolution: 2000 + units: "%" + standard_name: sea_ice_fraction diff --git a/satpy/etc/readers/ghrsst_l3c_sst.yaml b/satpy/etc/readers/ghrsst_l3c_sst.yaml deleted file mode 100644 index fd3ada064f..0000000000 --- a/satpy/etc/readers/ghrsst_l3c_sst.yaml +++ /dev/null @@ -1,17 +0,0 @@ -reader: - description: OSISAF SST GHRSST netCDF reader - name: ghrsst_l3c_sst - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [avhrr/3, viirs] - -datasets: - - sea_surface_temperature: - name: sea_surface_temperature - file_type: ghrsst_osisaf_l2 - resolution: 1000 - -file_types: - ghrsst_osisaf_l2: - file_reader: !!python/name:satpy.readers.ghrsst_l3c_sst.GHRSST_OSISAFL2 - file_patterns: ['S-OSI_-FRA_-{satid:3s}_-NARSST_FIELD-{start_time:%Y%m%d%H00}Z.nc'] diff --git a/satpy/etc/readers/iasi_l2_so2_bufr.yaml b/satpy/etc/readers/iasi_l2_so2_bufr.yaml index db8bf61169..aa8e057807 100644 --- a/satpy/etc/readers/iasi_l2_so2_bufr.yaml +++ b/satpy/etc/readers/iasi_l2_so2_bufr.yaml @@ -284,10 +284,3 @@ datasets: coordinates: [longitude, latitude] key: '#1#brightnessTemperatureRealPart' fill_value: -1.e+100 - - - - - - - diff --git a/satpy/etc/readers/jami_hrit.yaml b/satpy/etc/readers/jami_hrit.yaml index 663008034e..17f8dd24bc 100644 --- a/satpy/etc/readers/jami_hrit.yaml +++ b/satpy/etc/readers/jami_hrit.yaml @@ -121,4 +121,4 @@ datasets: brightness_temperature: standard_name: toa_brightness_temperature units: "K" - file_type: hrit_ir4 \ No newline at end of file + file_type: hrit_ir4 diff --git a/satpy/etc/readers/li_l2.yaml b/satpy/etc/readers/li_l2.yaml index 0088db19b2..f36848d6f0 100644 --- a/satpy/etc/readers/li_l2.yaml +++ b/satpy/etc/readers/li_l2.yaml @@ -64,4 +64,3 @@ file_types: li_lfl: file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LFL-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc'] - diff --git a/satpy/etc/readers/maia.yaml b/satpy/etc/readers/maia.yaml index 06e7f6ae6f..3a7ff8317e 100644 --- a/satpy/etc/readers/maia.yaml +++ 
b/satpy/etc/readers/maia.yaml @@ -43,10 +43,10 @@ datasets: coordinates: [Longitude, Latitude ] CloudMask: - name: CloudMask + name: CloudMask file_type: maia coordinates: [Longitude, Latitude ] - + # CloudType and CloudMask are bitfields # description of sub fields ct: @@ -54,7 +54,7 @@ datasets: file_type: maia coordinates: [Longitude, Latitude ] -# Cloud Mask On Pixel +# Cloud Mask On Pixel cma: name: cma file_type: maia @@ -109,7 +109,7 @@ datasets: units: degrees celcius file_type: maia coordinates: [Longitude, Latitude ] - + Sat_zenith: name: Sat_zenith units: degrees @@ -177,4 +177,3 @@ datasets: file_type: maia coordinates: [Longitude, Latitude ] - diff --git a/satpy/etc/readers/mhs_l1c_aapp.yaml b/satpy/etc/readers/mhs_l1c_aapp.yaml new file mode 100644 index 0000000000..dee4402ee0 --- /dev/null +++ b/satpy/etc/readers/mhs_l1c_aapp.yaml @@ -0,0 +1,165 @@ +reader: + name: mhs_l1c_aapp + description: AAPP l1c Reader for MHS data + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [mhs,] + default_channels: [] + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyRange + resolution: + polarization: + enum: + - H + - V + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + +datasets: + '1': + name: '1' + frequency_range: + central: 89. + bandwidth: 2.8 + unit: GHz + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + '2': + name: '2' + frequency_range: + central: 157. 
+ bandwidth: 2.8 + unit: GHz + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + '3': + name: '3' + frequency_double_sideband: + unit: GHz + central: 183.31 + side: 1.0 + bandwidth: 1.0 + polarization: 'H' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + '4': + name: '4' + frequency_double_sideband: + unit: GHz + central: 183.31 + side: 3.0 + bandwidth: 2.0 + polarization: 'H' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + '5': + name: '5' + frequency_range: + unit: GHz + central: 190.311 + bandwidth: 2.0 + polarization: 'V' + resolution: 16000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + + solar_zenith_angle: + name: solar_zenith_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + standard_name: solar_zenith_angle + units: degrees + + solar_azimuth_angle: + name: solar_azimuth_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + standard_name: solar_azimuth_angle + units: degrees + + sensor_zenith_angle: + name: sensor_zenith_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + standard_name: sensor_zenith_angle + units: degrees + + sensor_azimuth_angle: + name: sensor_azimuth_angle + resolution: 16000 + coordinates: + - longitude + - latitude + file_type: mhs_aapp_l1c + standard_name: sensor_azimuth_angle + units: degrees + + latitude: + name: latitude + resolution: 16000 + file_type: mhs_aapp_l1c + standard_name: latitude + units: degrees_north + + longitude: + name: longitude + resolution: 16000 + file_type: mhs_aapp_l1c + standard_name: longitude + units: degrees_east + +file_types: + mhs_aapp_l1c: + file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile + file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'] diff --git a/satpy/etc/readers/mirs.yaml b/satpy/etc/readers/mirs.yaml index 558f8a2254..eab6baa8fe 100644 --- a/satpy/etc/readers/mirs.yaml +++ b/satpy/etc/readers/mirs.yaml @@ -25,7 +25,7 @@ file_types: file_patterns: - 'IMG_SX.{platform_shortname}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{num}.WE.HR.ORB.nc' -datasets: +datasets: longitude: name: longitude file_type: metop_amsu diff --git a/satpy/etc/readers/modis_l2.yaml b/satpy/etc/readers/modis_l2.yaml index 5a900f03e0..22c15a1b48 100644 --- a/satpy/etc/readers/modis_l2.yaml +++ b/satpy/etc/readers/modis_l2.yaml @@ -799,7 +799,7 @@ datasets: resolution: 1000: file_key: Cloud_Water_Path_1621_PCL - + cloud_water_path_16: name: cloud_water_path_16 long_name: "Column Water Path two-band retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral" @@ -819,7 +819,7 @@ datasets: resolution: 1000: file_key: Cloud_Water_Path_16_PCL - + cloud_water_path_37: name: cloud_water_path_37 long_name: "Column Water Path two-band retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any 
way, not marked for clear sky restoral" @@ -839,7 +839,7 @@ datasets: resolution: 1000: file_key: Cloud_Water_Path_37_PCL - + cloud_effective_radius_uncertainty: name: cloud_effective_radius_uncertainty long_name: Cloud Effective Particle Radius (from band 7) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m diff --git a/satpy/etc/readers/msu_gsa_l1b.yaml b/satpy/etc/readers/msu_gsa_l1b.yaml new file mode 100644 index 0000000000..8ae74e2af4 --- /dev/null +++ b/satpy/etc/readers/msu_gsa_l1b.yaml @@ -0,0 +1,219 @@ +reader: + description: H5 reader for MSU-GS/A data + name: msu_gsa_l1b + sensors: [msu_gsa] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + + +file_types: + msu_gsa_l1b: + file_reader: !!python/name:satpy.readers.msu_gsa_l1b.MSUGSAFileHandler + file_patterns: ['ArcticaM{mission_id:1s}_{start_time:%Y%m%d%H%M}.h5'] + +datasets: + longitude: + name: longitude + units: degrees_east + standard_name: longitude + resolution: + 4000: + file_type: msu_gsa_l1b + file_key: Geolocation/resolution_4km/Longitude + 1000: + file_type: msu_gsa_l1b + file_key: Geolocation/resolution_1km/Longitude + latitude: + name: latitude + units: degrees_north + standard_name: latitude + resolution: + 4000: + file_type: msu_gsa_l1b + file_key: Geolocation/resolution_4km/Latitude + 1000: + file_type: msu_gsa_l1b + file_key: Geolocation/resolution_1km/Latitude + + # The channels C01-C03 (VIS) are available at 1km resolution + C01: + name: C01 + sensor: msu_gsa + wavelength: [0.5, 0.6, 0.65] + resolution: 1000 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance + units: W m-2 sr-1 + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Radiance_01 + C02: + name: C02 + sensor: msu_gsa + wavelength: [0.65, 0.7, 0.8] + resolution: 1000 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance + units: W m-2 sr-1 + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Radiance_02 + C03: + name: C03 + sensor: msu_gsa + wavelength: [0.8, 0.9, 0.9] + resolution: 1000 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance + units: W m-2 sr-1 + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Radiance_03 + + # The channels C04-C10 (IR) are available at 4km resolution + C04: + name: C04 + sensor: msu_gsa + wavelength: [3.5, 3.8, 4.0] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Brightness_Temperature_04 + C05: + name: C05 + sensor: msu_gsa + wavelength: [5.7, 6.4, 7.0] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Brightness_Temperature_05 + C06: + name: C06 + sensor: msu_gsa + wavelength: [7.5, 8.0, 8.5] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key:
Data/resolution_4km/Brightness_Temperature_06 + C07: + name: C07 + sensor: msu_gsa + wavelength: [8.2, 8.7, 9.2] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Brightness_Temperature_07 + C08: + name: C08 + sensor: msu_gsa + wavelength: [9.2, 9.7, 10.2] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Brightness_Temperature_08 + C09: + name: C09 + sensor: msu_gsa + wavelength: [10.2, 10.8, 11.2] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Brightness_Temperature_09 + C10: + name: C10 + sensor: msu_gsa + wavelength: [11.2, 11.9, 12.5] + resolution: 4000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + units: K + coordinates: [longitude, latitude] + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Brightness_Temperature_10 + + # The solar and viewing geometry is available at both resolutions + solar_zenith_angle: + name: solar_zenith_angle + units: degrees + standard_name: solar_zenith_angle + resolution: + 4000: + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Solar_Zenith_Angle + 1000: + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Solar_Zenith_Angle + coordinates: [longitude, latitude] + solar_azimuth_angle: + name: solar_azimuth_angle + units: degrees + standard_name: solar_azimuth_angle + resolution: + 4000: + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Solar_Azimuth_Angle + 1000: + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Solar_Azimuth_Angle + coordinates: [longitude, latitude] + satellite_zenith_angle: + name: satellite_zenith_angle + units: degrees + standard_name: satellite_zenith_angle + resolution: + 4000: + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Satellite_Zenith_Angle + 1000: + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Satellite_Zenith_Angle + coordinates: [longitude, latitude] + satellite_azimuth_angle: + name: satellite_azimuth_angle + units: degrees + standard_name: satellite_azimuth_angle + resolution: + 4000: + file_type: msu_gsa_l1b + file_key: Data/resolution_4km/Satellite_Azimuth_Angle + 1000: + file_type: msu_gsa_l1b + file_key: Data/resolution_1km/Satellite_Azimuth_Angle + coordinates: [longitude, latitude] diff --git a/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml b/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml index 3c431ad335..da064f8912 100644 --- a/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml +++ b/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml @@ -187,4 +187,3 @@ datasets: resolution: 3000 file_type: h5_nwcsaf_ctth file_key: CTTH_QUALITY - diff --git a/satpy/etc/readers/nwcsaf-pps_nc.yaml b/satpy/etc/readers/nwcsaf-pps_nc.yaml index 7a416972b5..a241922c92 100644 --- a/satpy/etc/readers/nwcsaf-pps_nc.yaml +++ b/satpy/etc/readers/nwcsaf-pps_nc.yaml @@ -35,8 +35,12 @@ file_types: nc_nwcsaf_cpp: file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF file_patterns: ['S_NWC_CPP_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] + file_key_prefix: cpp_ - + nc_nwcsaf_cmic: + file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF + 
file_patterns: ['S_NWC_CMIC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc'] + file_key_prefix: cmic_ datasets: @@ -214,63 +218,93 @@ datasets: file_type: nc_nwcsaf_ctth -# ---- CPP products ------------ +# ---- CMIC products (Was CPP in PPS<=2018)------------ + + cmic_phase: + name: cmic_phase + file_key: phase + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] + + cmic_phase_pal: + name: [cmic_phase_pal, cpp_phase_pal] + file_key: phase_pal + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] - cpp_phase: - name: cpp_phase - file_type: nc_nwcsaf_cpp + cmic_reff: + name: cmic_reff + file_key: [cre, reff] + file_type: [nc_nwcsaf_cmic, nc_nwcsaf_cpp] coordinates: [lon, lat] - cpp_phase_pal: - name: cpp_phase_pal - file_type: nc_nwcsaf_cpp + cmic_reff_pal: + name: [cmic_reff_pal, cmic_cre_pal, cpp_reff_pal] + file_key: [cre_pal, reff_pal] + scale_offset_dataset: [reff, cre] + file_type: [nc_nwcsaf_cmic, nc_nwcsaf_cpp] - cpp_reff: - name: cpp_reff - file_type: nc_nwcsaf_cpp + cmic_cot: + name: cmic_cot + file_key: cot + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] - cpp_reff_pal: - name: cpp_reff_pal - scale_offset_dataset: cpp_reff - file_type: nc_nwcsaf_cpp + cmic_cot_pal: + name: [cmic_cot_pal, cpp_cot_pal] + file_key: cot_pal + scale_offset_dataset: cot + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] - cpp_cot: - name: cpp_cot - file_type: nc_nwcsaf_cpp + cmic_cwp: + name: cmic_cwp + file_key: cwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] - cpp_cot_pal: - name: cpp_cot_pal - scale_offset_dataset: cpp_cot - file_type: nc_nwcsaf_cpp + cmic_cwp_pal: + name: [cmic_cwp_pal, cpp_cwp_pal] + file_key: cwp_pal + scale_offset_dataset: cwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] - cpp_cwp: - name: cpp_cwp - file_type: nc_nwcsaf_cpp + cmic_iwp: + name: cmic_iwp + file_key: iwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] - cpp_cwp_pal: - name: cpp_cwp_pal - scale_offset_dataset: cpp_cwp - file_type: nc_nwcsaf_cpp + cmic_iwp_pal: + name: [cmic_iwp_pal, cpp_iwp_pal] + file_key: iwp_pal + scale_offset_dataset: iwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] - cpp_iwp: - name: cpp_iwp - file_type: nc_nwcsaf_cpp + cmic_lwp: + name: cmic_lwp + file_key: lwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] - cpp_iwp_pal: - name: cpp_iwp_pal - scale_offset_dataset: cpp_iwp - file_type: nc_nwcsaf_cpp + cmic_lwp_pal: + name: [cmic_lwp_pal, cpp_lwp_pal] + file_key: lwp_pal + scale_offset_dataset: lwp + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + + cmic_status_flag: + name: [cmic_status_flag, cpp_status_flag] + file_key: status_flag + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] - cpp_lwp: - name: cpp_lwp - file_type: nc_nwcsaf_cpp + cmic_conditions: + name: [cmic_conditions, cpp_conditions] + file_key: conditions + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] coordinates: [lon, lat] - cpp_lwp_pal: - name: cpp_lwp_pal - scale_offset_dataset: cpp_lwp - file_type: nc_nwcsaf_cpp + cmic_quality: + name: [cmic_quality, cpp_quality] + file_key: quality + file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic] + coordinates: [lon, lat] diff --git a/satpy/etc/readers/omps_edr.yaml b/satpy/etc/readers/omps_edr.yaml index d872a966db..26efaeba04 100644 --- a/satpy/etc/readers/omps_edr.yaml +++ b/satpy/etc/readers/omps_edr.yaml @@ -240,4 +240,3 @@ datasets: coordinates: [longitude_sampo, latitude_sampo] file_type: omps_sampo file_key: SCIENCE_DATA/CloudFraction - 
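The nwcsaf-pps_nc changes above keep pre-2019 CPP output loadable under the new CMIC names: each dataset lists both file types, file_key (sometimes a list, e.g. [cre, reff]) names the variable inside the file, and the per-file-type file_key_prefix (cmic_ or cpp_) is prepended on read. The same load call should therefore work for either product generation. A usage sketch, with an illustrative filename and assuming the reader is registered under the YAML file's name, nwcsaf-pps_nc:

    from satpy import Scene

    # The same code works for an older S_NWC_CPP_*.nc file: "cmic_cwp"
    # resolves to the file variable "cmic_cwp" or "cpp_cwp" depending on
    # which file type matched, via file_key "cwp" plus file_key_prefix.
    scn = Scene(
        reader="nwcsaf-pps_nc",
        filenames=["S_NWC_CMIC_npp_00000_20220101T1200000Z_20220101T1215000Z.nc"],
    )
    scn.load(["cmic_cwp", "cmic_phase"])
    cwp = scn["cmic_cwp"]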
diff --git a/satpy/etc/readers/seadas_l2.yaml b/satpy/etc/readers/seadas_l2.yaml new file mode 100644 index 0000000000..f1050a50c3 --- /dev/null +++ b/satpy/etc/readers/seadas_l2.yaml @@ -0,0 +1,51 @@ +reader: + description: MODIS and VIIRS SEADAS Reader + name: seadas_l2 + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [modis, viirs] + +file_types: + chlora_seadas: + file_patterns: + # IMAPP-style filenames: + - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.seadas.hdf' + file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler + geo_resolution: 1000 + chlora_seadas_viirs: + # SEADAS_npp_d20211118_t1728125_e1739327.hdf + file_patterns: + - 'SEADAS_{platform_indicator:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}.hdf' + file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler + geo_resolution: 750 + +datasets: + longitude: + name: longitude + file_type: [chlora_seadas, chlora_seadas_viirs] + file_key: longitude + resolution: + 1000: + file_type: chlora_seadas + 750: + file_type: chlora_seadas_viirs + + latitude: + name: latitude + file_type: [chlora_seadas, chlora_seadas_viirs] + file_key: latitude + resolution: + 1000: + file_type: chlora_seadas + 750: + file_type: chlora_seadas_viirs + + chlor_a: + name: chlor_a + file_type: [chlora_seadas, chlora_seadas_viirs] + file_key: chlor_a + resolution: + 1000: + file_type: chlora_seadas + 750: + file_type: chlora_seadas_viirs + coordinates: [longitude, latitude] diff --git a/satpy/etc/readers/seviri_l1b_hrit.yaml b/satpy/etc/readers/seviri_l1b_hrit.yaml index e26a5a08ec..43fe31e12e 100644 --- a/satpy/etc/readers/seviri_l1b_hrit.yaml +++ b/satpy/etc/readers/seviri_l1b_hrit.yaml @@ -16,73 +16,73 @@ file_types: HRIT_HRV: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 24 HRIT_IR_016: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_039: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_087: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler -
file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_097: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_108: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_120: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_IR_134: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS006: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_VIS008: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + 
file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_062: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 HRIT_WV_073: file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler - file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__'] + file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2'] requires: [HRIT_PRO, HRIT_EPI] expected_segments: 8 diff --git a/satpy/etc/readers/seviri_l1b_native.yaml b/satpy/etc/readers/seviri_l1b_native.yaml index d08c86b0e5..896ef4c245 100644 --- a/satpy/etc/readers/seviri_l1b_native.yaml +++ b/satpy/etc/readers/seviri_l1b_native.yaml @@ -31,8 +31,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -47,8 +47,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -63,8 +63,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -79,8 +79,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -95,8 +95,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -111,8 +111,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -127,8 +127,8 @@ 
datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -143,8 +143,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -159,8 +159,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -175,8 +175,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -191,8 +191,8 @@ datasets: standard_name: toa_brightness_temperature units: "K" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -207,8 +207,8 @@ datasets: standard_name: toa_brightness_temperature units: "K" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count diff --git a/satpy/etc/readers/seviri_l1b_nc.yaml b/satpy/etc/readers/seviri_l1b_nc.yaml index aeacf83e56..20d07e0665 100644 --- a/satpy/etc/readers/seviri_l1b_nc.yaml +++ b/satpy/etc/readers/seviri_l1b_nc.yaml @@ -25,8 +25,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -42,8 +42,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -59,8 +59,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -76,8 +76,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -93,8 +93,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: 
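The calibration change running through these SEVIRI L1B definitions replaces spectral radiance per unit wavelength (W m-2 um-1 sr-1) with radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1), which is the unit convention EUMETSAT uses for the SEVIRI calibration coefficients. Both forms carry the same energy and are related by |L_nu d(nu)| = |L_lambda d(lambda)|. A minimal conversion sketch in Python (the helper name is illustrative and not part of this patch):

def radiance_per_um_to_per_cm1(rad_wl, wavelength_um):
    """Convert W m-2 um-1 sr-1 to mW m-2 sr-1 (cm-1)-1.

    With wavenumber nu = 1e4 / lambda (nu in cm-1, lambda in um),
    |d(nu)| = 1e4 / lambda**2 * |d(lambda)|, so
    L_nu = L_lambda * lambda**2 / 1e4 in W m-2 sr-1 (cm-1)-1;
    the extra factor of 1000 (W -> mW) gives a net factor of
    0.1 * lambda**2.
    """
    return rad_wl * 0.1 * wavelength_um ** 2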
standard_name: counts units: count @@ -110,8 +110,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -127,8 +127,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -144,8 +144,8 @@ datasets: standard_name: toa_brightness_temperature units: K radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -161,8 +161,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -179,8 +179,8 @@ datasets: standard_name: toa_bidirectional_reflectance units: "%" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -196,8 +196,8 @@ datasets: standard_name: toa_brightness_temperature units: "K" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count @@ -213,12 +213,10 @@ datasets: standard_name: toa_brightness_temperature units: "K" radiance: - standard_name: toa_outgoing_radiance_per_unit_wavelength - units: W m-2 um-1 sr-1 + standard_name: toa_outgoing_radiance_per_unit_wavenumber + units: mW m-2 sr-1 (cm-1)-1 counts: standard_name: counts units: count file_type: seviri_l1b_nc nc_key: 'ch6' - - diff --git a/satpy/etc/readers/seviri_l2_bufr.yaml b/satpy/etc/readers/seviri_l2_bufr.yaml index badda828dd..9b115d3ef6 100644 --- a/satpy/etc/readers/seviri_l2_bufr.yaml +++ b/satpy/etc/readers/seviri_l2_bufr.yaml @@ -3,7 +3,7 @@ reader: name: seviri_l2_bufr sensors: [seviri] default_channels: [] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader file_types: seviri_l2_bufr_asr: @@ -52,7 +52,7 @@ datasets: latitude: name: latitude key: 'latitude' - resolution: [48000,9000] + resolution: [48006.450653072,9001.209497451] file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz] standard_name: latitude units: degree_north @@ -61,7 +61,7 @@ datasets: longitude: name: longitude key: 'longitude' - resolution: [48000,9000] + resolution: [48006.450653072,9001.209497451] file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz] standard_name: longitude units: degree_east @@ -71,7 +71,8 @@ datasets: nir39all: name: nir39all key: '#19#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [ 3.48, 3.92, 4.36 ] standard_name: 
toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -83,7 +84,8 @@ datasets: nir39clr: name: nir39clr key: '#20#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -95,7 +97,8 @@ datasets: nir39cld: name: nir39cld key: '#21#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -107,7 +110,8 @@ datasets: nir39low: name: nir39low key: '#22#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -119,7 +123,8 @@ datasets: nir39med: name: nir39med key: '#23#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -131,7 +136,8 @@ datasets: nir39high: name: nir39high key: '#24#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [ 3.48, 3.92, 4.36 ] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -143,7 +149,8 @@ datasets: wv62all: name: wv62all key: '#25#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -155,7 +162,8 @@ datasets: wv62clr: name: wv62clr key: '#26#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -167,7 +175,8 @@ datasets: wv62cld: name: wv62cld key: '#27#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -179,7 +188,8 @@ datasets: wv62low: name: wv62low key: '#28#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -191,7 +201,8 @@ datasets: wv62med: name: wv62med key: '#29#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -203,7 +214,8 @@ datasets: wv62high: name: wv62high key: '#30#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -215,7 +227,8 @@ datasets: wv73all: name: wv73all key: '#31#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -227,7 +240,8 @@ datasets: wv73clr: name: wv73clr key: '#32#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -239,7 +253,8 @@ datasets: wv73cld: name: wv73cld key: '#33#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: 
K file_type: seviri_l2_bufr_asr @@ -251,7 +266,8 @@ datasets: wv73low: name: wv73low key: '#34#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -263,7 +279,8 @@ datasets: wv73med: name: wv73med key: '#35#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -275,7 +292,8 @@ datasets: wv73high: name: wv73high key: '#36#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -287,7 +305,8 @@ datasets: ir87all: name: ir87all key: '#37#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -299,7 +318,8 @@ datasets: ir87clr: name: ir87clr key: '#38#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -311,7 +331,8 @@ datasets: ir87cld: name: ir87cld key: '#39#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -323,7 +344,8 @@ datasets: ir87low: name: ir87low key: '#40#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -335,7 +357,8 @@ datasets: ir87med: name: ir87med key: '#41#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -347,7 +370,8 @@ datasets: ir87high: name: ir87high key: '#42#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -359,7 +383,8 @@ datasets: ir97all: name: ir97all key: '#43#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -371,7 +396,8 @@ datasets: ir97clr: name: ir97clr key: '#44#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -383,7 +409,8 @@ datasets: ir97cld: name: ir97cld key: '#45#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -395,7 +422,8 @@ datasets: ir97low: name: ir97low key: '#46#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -407,7 +435,8 @@ datasets: ir97med: name: ir97med key: '#47#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -419,7 +448,8 @@ datasets: 
ir97high: name: ir97high key: '#48#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -431,7 +461,8 @@ datasets: ir108all: name: ir108all key: '#49#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -443,7 +474,8 @@ datasets: ir108clr: name: ir108clr key: '#50#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -455,7 +487,8 @@ datasets: ir108cld: name: ir108cld key: '#51#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -467,7 +500,8 @@ datasets: ir108low: name: ir108low key: '#52#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -479,7 +513,8 @@ datasets: ir108med: name: ir108med key: '#53#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -491,7 +526,8 @@ datasets: ir108high: name: ir108high key: '#54#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -503,7 +539,8 @@ datasets: ir120all: name: ir120all key: '#55#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -515,7 +552,8 @@ datasets: ir120clr: name: ir120clr key: '#56#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -527,7 +565,8 @@ datasets: ir120cld: name: ir120cld key: '#57#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -539,7 +578,8 @@ datasets: ir120low: name: ir120low key: '#58#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -551,7 +591,8 @@ datasets: ir120med: name: ir120med key: '#59#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -563,7 +604,8 @@ datasets: ir120high: name: ir120high key: '#60#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -575,7 +617,8 @@ datasets: ir134all: name: ir134all key: '#61#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -587,7 +630,8 @@ datasets: ir134clr: name: ir134clr 
key: '#62#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -599,7 +643,8 @@ datasets: ir134cld: name: ir134cld key: '#63#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -611,7 +656,8 @@ datasets: ir134low: name: ir134low key: '#64#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -623,7 +669,8 @@ datasets: ir134med: name: ir134med key: '#65#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -635,7 +682,8 @@ datasets: ir134high: name: ir134high key: '#66#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: K file_type: seviri_l2_bufr_asr @@ -647,7 +695,7 @@ datasets: pcld: name: pcld key: '#1#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_asr @@ -659,7 +707,7 @@ datasets: pclr: name: pclr key: '#1#amountSegmentCloudFree' - resolution: 48000 + resolution: 48006.450653072 standard_name: clear_sky_area_fraction units: '%' file_type: seviri_l2_bufr_asr @@ -671,7 +719,7 @@ datasets: pclrs: name: pclrs key: '#2#amountSegmentCloudFree' - resolution: 48000 + resolution: 48006.450653072 standard_name: clear_sky_area_fraction units: '%' file_type: seviri_l2_bufr_asr @@ -684,7 +732,7 @@ datasets: hca: name: hca key: '#1#amountOfHighClouds' - resolution: 48000 + resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla @@ -696,7 +744,7 @@ datasets: lca: name: lca key: '#1#amountOfLowClouds' - resolution: 48000 + resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla @@ -708,7 +756,7 @@ datasets: mca: name: mca key: '#1#amountOfMiddleClouds' - resolution: 48000 + resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla @@ -720,7 +768,7 @@ datasets: tca: name: tca key: '#1#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_cla @@ -733,7 +781,8 @@ datasets: nir39: name: nir39 key: '#4#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [3.48, 3.92, 4.36] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -745,7 +794,8 @@ datasets: cld39: name: cld39 key: '#4#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [3.48, 3.92, 4.36] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -757,7 +807,8 @@ datasets: wv62: name: wv62 key: '#5#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -769,7 +820,8 @@ datasets: cld62: name: cld62 key: '#5#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [5.35, 6.25, 7.15]
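The precise resolution values used throughout this file are exact multiples of SEVIRI's nominal 3 km sub-satellite sampling distance of 3000.403165817 m (the same value the GRIB reader below switches to): 16x of it for the 48 km BUFR segments and 3x for the 9 km segments. The added wavelength ranges should also make these datasets selectable by wavelength, e.g. scn.load([6.2]) for the 6.2 um water vapour products. A quick, purely illustrative check of the arithmetic:

SEVIRI_3KM = 3000.403165817  # nominal SEVIRI 3 km sampling distance in metres
assert round(SEVIRI_3KM * 16, 9) == 48006.450653072  # 48 km BUFR segments
assert round(SEVIRI_3KM * 3, 9) == 9001.209497451    # 9 km BUFR segments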
standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -781,7 +833,8 @@ datasets: wv73: name: wv73 key: '#6#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -793,7 +846,8 @@ datasets: cld73: name: cld73 key: '#6#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [6.85, 7.35, 7.85] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -805,7 +859,8 @@ datasets: ir87: name: ir87 key: '#7#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -817,7 +872,8 @@ datasets: cld87: name: cld87 key: '#7#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [8.3, 8.7, 9.1] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -829,7 +885,8 @@ datasets: ir97: name: ir97 key: '#8#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -841,7 +898,8 @@ datasets: cld97: name: cld97 key: '#8#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.38, 9.66, 9.94] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -853,7 +911,8 @@ datasets: ir108: name: ir108 key: '#9#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -865,7 +924,8 @@ datasets: cld108: name: cld108 key: '#9#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [9.8, 10.8, 11.8] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -877,7 +937,8 @@ datasets: ir120: name: ir120 key: '#10#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -889,7 +950,8 @@ datasets: cld120: name: cld120 key: '#10#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [11.0, 12.0, 13.0] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -901,7 +963,8 @@ datasets: ir134: name: ir134 key: '#11#brightnessTemperature' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: toa_brightness_temperature units: "W/sr-1/m-2" file_type: seviri_l2_bufr_csr @@ -913,7 +976,8 @@ datasets: cld134: name: cld134 key: '#11#cloudAmountInSegment' - resolution: 48000 + resolution: 48006.450653072 + wavelength: [12.4, 13.4, 14.4] standard_name: cloud_area_fraction units: '%' file_type: seviri_l2_bufr_csr @@ -927,7 +991,7 @@ datasets: ki: name: ki key: '#1#kIndex' - resolution: 9000 + resolution: 9001.209497451 standard_name: atmosphere_stability_k_index coordinates: - longitude @@ -939,7 +1003,7 @@ datasets: ko: name: ko key: '#1#koIndex' - resolution: 9000 + resolution: 9001.209497451 standard_name: atmosphere_stability_ko_index coordinates: - longitude @@ -951,7 +1015,7 @@ datasets: li: name: li key: '#1#parcelLiftedIndexTo500Hpa' - resolution: 9000 + resolution: 9001.209497451 standard_name: 
atmosphere_stability_lifted_index coordinates: - longitude @@ -963,7 +1027,7 @@ datasets: lpw1: name: lpw1 key: '#2#precipitableWater' - resolution: 9000 + resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude @@ -975,7 +1039,7 @@ datasets: lpw2: name: lpw2 key: '#3#precipitableWater' - resolution: 9000 + resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude @@ -987,7 +1051,7 @@ datasets: lpw3: name: lpw3 key: '#4#precipitableWater' - resolution: 9000 + resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude @@ -999,7 +1063,7 @@ datasets: mb: name: mb key: '#1#maximumBuoyancy' - resolution: 9000 + resolution: 9001.209497451 standard_name: atmosphere_stability_maximum_buoyancy_index coordinates: - longitude @@ -1011,7 +1075,7 @@ datasets: stza: name: stza key: '#1#satelliteZenithAngle' - resolution: 9000 + resolution: 9001.209497451 standard_name: sensor_zenith_angle coordinates: - longitude @@ -1023,7 +1087,7 @@ datasets: tpw: name: tpw key: '#1#precipitableWater' - resolution: 9000 + resolution: 9001.209497451 standard_name: lwe_thickness_of_precipitation_amount coordinates: - longitude @@ -1036,7 +1100,7 @@ datasets: thu62: name: thu62 key: '#1#relativeHumidity' - resolution: 48000 + resolution: 48006.450653072 standard_name: relative_humidity units: '%' file_type: seviri_l2_bufr_thu @@ -1048,7 +1112,7 @@ datasets: thu73: name: thu73 key: '#2#relativeHumidity' - resolution: 48000 + resolution: 48006.450653072 standard_name: relative_humidity units: '%' file_type: seviri_l2_bufr_thu @@ -1061,7 +1125,7 @@ datasets: toz: name: toz key: '#1#totalOzone' - resolution: 9000 + resolution: 9001.209497451 standard_name: atmosphere_mass_content_of_ozone units: dobson file_type: seviri_l2_bufr_toz @@ -1073,7 +1137,7 @@ datasets: qual: name: qual key: '#1#totalOzone->totalOzoneQuality' - resolution: 9000 + resolution: 9001.209497451 standard_name: total_ozone_quality units: "" file_type: seviri_l2_bufr_toz diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml index 3995f60fd8..dd82b02965 100644 --- a/satpy/etc/readers/seviri_l2_grib.yaml +++ b/satpy/etc/readers/seviri_l2_grib.yaml @@ -4,7 +4,8 @@ reader: long_name: MSG SEVIRI L2 (GRIB) description: Reader for EUMETSAT MSG SEVIRI L2 files in GRIB format.
sensors: [seviri] - reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + file_types: # EUMETSAT MSG SEVIRI L2 Cloud Mask files in GRIB format @@ -70,7 +71,7 @@ datasets: cloud_mask: name: cloud_mask - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_clm parameter_number: 7 units: "1" @@ -78,7 +79,7 @@ datasets: pixel_scene_type: name: pixel_scene_type - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 8 units: "1" @@ -86,7 +87,7 @@ datasets: measurement_cost: name: measurement_cost - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 30 units: "1" @@ -94,7 +95,7 @@ datasets: upper_layer_cloud_optical_depth: name: upper_layer_cloud_optical_depth - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 31 units: "1" @@ -102,7 +103,7 @@ datasets: upper_layer_cloud_top_pressure: name: upper_layer_cloud_top_pressure - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 32 units: Pa @@ -110,7 +111,7 @@ datasets: upper_layer_cloud_effective_radius: name: upper_layer_cloud_effective_radius - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 33 units: m @@ -118,7 +119,7 @@ datasets: error_in_upper_layer_cloud_optical_depth: name: error_in_upper_layer_cloud_optical_depth - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 34 units: "1" @@ -126,7 +127,7 @@ datasets: error_in_upper_layer_cloud_top_pressure: name: error_in_upper_layer_cloud_top_pressure - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 35 units: Pa @@ -134,7 +135,7 @@ datasets: error_in_upper_layer_cloud_effective_radius: name: error_in_upper_layer_cloud_effective_radius - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 36 units: m @@ -142,7 +143,7 @@ datasets: lower_layer_cloud_optical_depth: name: lower_layer_cloud_optical_depth - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 37 units: "1" @@ -150,7 +151,7 @@ datasets: lower_layer_cloud_top_pressure: name: lower_layer_cloud_top_pressure - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 38 units: Pa @@ -158,7 +159,7 @@ datasets: error_in_lower_layer_cloud_optical_depth: name: error_in_lower_layer_cloud_optical_depth - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 39 units: "1" @@ -166,7 +167,7 @@ datasets: error_in_lower_layer_cloud_top_pressure: name: error_in_lower_layer_cloud_top_pressure - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_oca parameter_number: 40 units: Pa @@ -174,7 +175,7 @@ datasets: fire_probability: name: fire_probability - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_fir parameter_number: 192 units: "%" @@ -182,7 +183,7 @@ datasets: active_fires: name: active_fires - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_fir parameter_number: 9 units: "1" @@ -190,31 +191,31 @@ datasets: aerosol_optical_thickness_vis06: name: aerosol_optical_thickness_vis06 - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 20 - units: "um" + units: "1" long_name: aerosol_optical_thickness_vis06 
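Since the reader class above is now GEOFlippableFileYAMLReader, these GRIB products can be re-oriented at load time through satpy's upper_right_corner keyword, as with other geostationary readers. A usage sketch (the file name is hypothetical):

from satpy import Scene

# hypothetical SEVIRI L2 cloud mask file in GRIB format
scn = Scene(reader="seviri_l2_grib", filenames=["seviri_l2_clm_example.grb"])
# flip the scene so its upper-right corner points to the north-east
scn.load(["cloud_mask"], upper_right_corner="NE")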
aerosol_optical_thickness_vis08: name: aerosol_optical_thickness_vis08 - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 21 - units: "um" + units: "1" long_name: aerosol_optical_thickness_vis08 aerosol_optical_thickness_vis16: name: aerosol_optical_thickness_vis16 - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 22 - units: "um" - long_name: aerosol_optical_thickness_vis06 + units: "1" + long_name: aerosol_optical_thickness_vis16 angstroem_coefficient: name: angstroem_coefficient - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 23 units: "1" @@ -222,7 +223,7 @@ datasets: aes_quality: name: aes_quality - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_aes parameter_number: 192 units: "1" @@ -230,15 +231,15 @@ datasets: cloud_top_height: name: cloud_top_height - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_cth parameter_number: 2 - units: Pa + units: m long_name: cloud_top_height cloud_top_quality: name: cloud_top_quality - resolution: 3000 + resolution: 9001.209497451 file_type: grib_seviri_cth parameter_number: 3 units: "1" @@ -246,7 +247,8 @@ datasets: vis_refl_06: name: vis_refl_06 - resolution: 3000 + resolution: 3000.403165817 + wavelength: [0.56, 0.635, 0.71] file_type: grib_seviri_crm parameter_number: 9 units: "%" @@ -254,7 +256,8 @@ datasets: vis_refl_08: name: vis_refl_08 - resolution: 3000 + resolution: 3000.403165817 + wavelength: [0.74, 0.81, 0.88] file_type: grib_seviri_crm parameter_number: 10 units: "%" @@ -262,7 +265,8 @@ datasets: vis_refl_16: name: vis_refl_16 - resolution: 3000 + resolution: 3000.403165817 + wavelength: [1.5, 1.64, 1.78] file_type: grib_seviri_crm parameter_number: 11 units: "%" @@ -270,7 +274,8 @@ datasets: nir_refl_39: name: nir_refl_39 - resolution: 3000 + resolution: 3000.403165817 + wavelength: [3.48, 3.92, 4.36] file_type: grib_seviri_crm parameter_number: 12 units: "%" @@ -278,23 +283,23 @@ datasets: num_accumulations: name: num_accumulations - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 6 units: "1" long_name: num_accumulations - azimuth_angle: - name: azimuth_angle - resolution: 3000 + solar_zenith_angle: + name: solar_zenith_angle + resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 7 units: degrees - long_name: azimuth_angle + long_name: solar_zenith_angle relative_azimuth_angle: name: relative_azimuth_angle - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_crm parameter_number: 8 units: degrees @@ -302,7 +307,7 @@ datasets: instantaneous_rain_rate: name: instantaneous_rain_rate - resolution: 3000 + resolution: 3000.403165817 file_type: grib_seviri_mpe parameter_number: 1 units: "kg m-2 s-1" diff --git a/satpy/etc/readers/slstr_l1b.yaml b/satpy/etc/readers/slstr_l1b.yaml index 73af1da9b7..98ff1707b1 100644 --- a/satpy/etc/readers/slstr_l1b.yaml +++ b/satpy/etc/readers/slstr_l1b.yaml @@ -89,6 +89,16 @@ datasets: standard_name: latitude units: degree + elevation: + name: elevation + resolution: [500, 1000] + view: [nadir, oblique] + stripe: [a, b, i, f] + file_type: esa_geo + file_key: elevation_{stripe:1s}{view:1s} + standard_name: elevation + units: m + # The channels S1-S3 are available in nadir (default) and oblique view. 
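The new elevation entry above exposes the terrain height from the ESA geodetic file on the SLSTR grids; because it exists for several view/stripe/resolution combinations, a DataQuery can pin down the variant to load. A sketch, assuming slstr_files is a hypothetical list of SLSTR L1B file paths:

from satpy import DataQuery, Scene

scn = Scene(reader="slstr_l1b", filenames=slstr_files)  # slstr_files: hypothetical
query = DataQuery(name="elevation", resolution=500, view="nadir", stripe="a")
scn.load([query])
print(scn[query].attrs["units"])  # "m", per the definition above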
S1: name: S1 diff --git a/satpy/etc/readers/tropomi_l2.yaml b/satpy/etc/readers/tropomi_l2.yaml index 7e164c5f82..87ecd6b5e5 100644 --- a/satpy/etc/readers/tropomi_l2.yaml +++ b/satpy/etc/readers/tropomi_l2.yaml @@ -67,4 +67,3 @@ datasets: file_type: tropomi_l2 file_key: 'PRODUCT/time_utc' standard_name: time_utc - diff --git a/satpy/etc/readers/vii_l1b_nc.yaml b/satpy/etc/readers/vii_l1b_nc.yaml index 993878580c..0caea41fc0 100644 --- a/satpy/etc/readers/vii_l1b_nc.yaml +++ b/satpy/etc/readers/vii_l1b_nc.yaml @@ -3,7 +3,7 @@ reader: short_name: VII L1B RAD NetCDF4 long_name: EPS-SG VII L1B Radiance (NetCDF4) description: > - Reader for EUMETSAT EPSG-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format. + Reader for EUMETSAT EPS-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format per FS V4A. sensors: [vii] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader @@ -11,9 +11,7 @@ file_types: # EUMETSAT EPSG-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format nc_vii_l1b_rad: file_reader: !!python/name:satpy.readers.vii_l1b_nc.ViiL1bNCFileHandler - file_patterns: ['W_DE-AIRBUSDS-Friedrichshafen,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc', - 'W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc', - 'W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] + file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc'] cached_longitude: data/measurement_data/longitude cached_latitude: data/measurement_data/latitude @@ -51,7 +49,7 @@ datasets: name: vii_443 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_443 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -65,7 +63,7 @@ datasets: name: vii_555 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_555 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -79,7 +77,7 @@ datasets: name: vii_668 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_668 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -93,7 +91,7 @@ datasets: name: vii_752 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_752 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -107,7 +105,7 @@ datasets: name: vii_763 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_763 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: [reflectance, radiance] chan_solar_index: 4 wavelength: [0.75695, 0.7627, 
0.76845] @@ -116,7 +114,7 @@ datasets: name: vii_865 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_865 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -130,7 +128,7 @@ datasets: name: vii_914 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_914 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -144,7 +142,7 @@ datasets: name: vii_1240 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1240 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -158,7 +156,7 @@ datasets: name: vii_1375 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1375 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -172,7 +170,7 @@ datasets: name: vii_1630 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_1630 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -186,7 +184,7 @@ datasets: name: vii_2250 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_2250 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: reflectance: standard_name: toa_bidirectional_reflectance @@ -200,7 +198,7 @@ datasets: name: vii_3740 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_3740 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -214,7 +212,7 @@ datasets: name: vii_3959 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_3959 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -228,7 +226,7 @@ datasets: name: vii_4050 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_4050 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -242,7 +240,7 @@ datasets: name: vii_6725 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_6725 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -256,7 +254,7 @@ datasets: name: vii_7325 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_7325 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -270,7 +268,7 @@ datasets: name: vii_8540 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_8540 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -284,7 +282,7 @@ datasets: name: vii_10690 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_10690 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -298,7 +296,7 @@ datasets: name: vii_12020 file_type: nc_vii_l1b_rad 
file_key: data/measurement_data/vii_12020 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -312,7 +310,7 @@ datasets: name: vii_13345 file_type: nc_vii_l1b_rad file_key: data/measurement_data/vii_13345 - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] calibration: brightness_temperature: standard_name: toa_brightness_temperature @@ -329,28 +327,28 @@ datasets: standard_name: solar_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_zenith - coordinates: [lon_tie_points, lat_tie_points] + coordinates: [lat_tie_points, lon_tie_points] solar_azimuth_tie_points: name: solar_azimuth_tie_points standard_name: solar_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_azimuth - coordinates: [lon_tie_points, lat_tie_points] + coordinates: [lat_tie_points, lon_tie_points] observation_zenith_tie_points: name: observation_zenith_tie_points standard_name: sensor_zenith_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_zenith - coordinates: [lon_tie_points, lat_tie_points] + coordinates: [lat_tie_points, lon_tie_points] observation_azimuth_tie_points: name: observation_azimuth_tie_points standard_name: sensor_azimuth_angle file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_azimuth - coordinates: [lon_tie_points, lat_tie_points] + coordinates: [lat_tie_points, lon_tie_points] solar_zenith: name: solar_zenith @@ -358,7 +356,7 @@ datasets: file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_zenith interpolate: True - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] solar_azimuth: name: solar_azimuth @@ -366,7 +364,7 @@ datasets: file_type: nc_vii_l1b_rad file_key: data/measurement_data/solar_azimuth interpolate: True - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] observation_zenith: name: observation_zenith @@ -374,7 +372,7 @@ datasets: file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_zenith interpolate: True - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] observation_azimuth: name: observation_azimuth @@ -382,19 +380,19 @@ datasets: file_type: nc_vii_l1b_rad file_key: data/measurement_data/observation_azimuth interpolate: True - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] # --- Orthorectification data --- delta_lat_N_dem: name: delta_lat_N_dem file_type: nc_vii_l1b_rad file_key: data/measurement_data/delta_lat_N_dem - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] standard_name: parallax_delta_latitude delta_lon_N_dem: name: delta_lon_N_dem file_type: nc_vii_l1b_rad file_key: data/measurement_data/delta_lon_N_dem - coordinates: [lon_pixels, lat_pixels] + coordinates: [lat_pixels, lon_pixels] standard_name: parallax_delta_longitude diff --git a/satpy/etc/readers/viirs_edr_active_fires.yaml b/satpy/etc/readers/viirs_edr_active_fires.yaml index 94dbd95f07..41a7546ac2 100644 --- a/satpy/etc/readers/viirs_edr_active_fires.yaml +++ b/satpy/etc/readers/viirs_edr_active_fires.yaml @@ -81,4 +81,4 @@ datasets: file_key: "{variable_prefix}FP_T4" coordinates: [longitude, latitude] standard_name: toa_brightness_temperature - units: 'K' \ No newline at end of file + units: 'K' diff --git a/satpy/etc/readers/viirs_l1b.yaml b/satpy/etc/readers/viirs_l1b.yaml index a47d00d210..06b5442991 100644 --- 
a/satpy/etc/readers/viirs_l1b.yaml +++ b/satpy/etc/readers/viirs_l1b.yaml @@ -35,36 +35,42 @@ file_types: - 'VGEOI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' + - 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc' vgeom: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' + - 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc' vgeod: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VGEOD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}03DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' + - 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc' vl1bi: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' + - 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc' vl1bm: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' + - 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc' vl1bd: file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler file_patterns: - 'VL1BD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc' - 'V{platform_shortname:2s}02DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc' - 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc' + - 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc' datasets: i_lon: diff --git a/satpy/etc/writers/awips_tiled.yaml b/satpy/etc/writers/awips_tiled.yaml index c86abd8af9..33d81fd16c 100644 --- a/satpy/etc/writers/awips_tiled.yaml +++ b/satpy/etc/writers/awips_tiled.yaml @@ -111,6 +111,7 @@ templates: attributes: physical_element: raw_value: ACSPO SST + units: {} # CLAVR-x Products default_clavrx: @@ -216,36 +217,42 @@ templates: attributes: physical_element: raw_value: 0.63 um + units: {} avhrr_band2_vis: name: band2_vis 
var_name: data attributes: physical_element: raw_value: 0.86 um + units: {} avhrr_band3a_vis: name: band3a_vis var_name: data attributes: physical_element: raw_value: 1.61 um + units: {} avhrr_band3b_bt: name: band3b_bt var_name: data attributes: physical_element: raw_value: 3.74 um + units: {} avhrr_band4_bt: name: band4_bt var_name: data attributes: physical_element: raw_value: 10.8 um + units: {} avhrr_band5_bt: name: band5_bt var_name: data attributes: physical_element: raw_value: 12.0 um + units: {} # VIIRS SDRs viirs_i01: @@ -254,156 +261,182 @@ templates: attributes: physical_element: raw_value: 0.64 um + units: {} viirs_i02: name: I02 var_name: data attributes: physical_element: raw_value: 0.86 um + units: {} viirs_i03: name: I03 var_name: data attributes: physical_element: raw_value: 1.61 um + units: {} viirs_i04: name: I04 var_name: data attributes: physical_element: raw_value: 3.74 um + units: {} viirs_i05: name: I05 var_name: data attributes: physical_element: raw_value: 11.5 um + units: {} viirs_histogram_dnb: name: histogram_dnb var_name: data attributes: physical_element: raw_value: Histogram DNB + units: {} viirs_adaptive_dnb: name: adaptive_dnb var_name: data attributes: physical_element: raw_value: Adaptive DNB + units: {} viirs_dynamic_dnb: name: dynamic_dnb var_name: data attributes: physical_element: raw_value: Dynamic DNB + units: {} viirs_hncc_dnb: name: hncc_dnb var_name: data attributes: physical_element: raw_value: HNCC DNB + units: {} viirs_ifog: name: ssec_fog var_name: data attributes: physical_element: raw_value: Fog + units: {} viirs_m01: name: M01 var_name: data attributes: physical_element: raw_value: 0.41 um + units: {} viirs_m02: name: M02 var_name: data attributes: physical_element: raw_value: 0.45 um + units: {} viirs_m03: name: M03 var_name: data attributes: physical_element: raw_value: 0.49 um + units: {} viirs_m04: name: M04 var_name: data attributes: physical_element: raw_value: 0.56 um + units: {} viirs_m05: name: M05 var_name: data attributes: physical_element: raw_value: 0.67 um + units: {} viirs_m06: name: M06 var_name: data attributes: physical_element: raw_value: 0.75 um + units: {} viirs_m07: name: M07 var_name: data attributes: physical_element: raw_value: 0.86 um + units: {} viirs_m08: name: M08 var_name: data attributes: physical_element: raw_value: 1.24 um + units: {} viirs_m09: name: M09 var_name: data attributes: physical_element: raw_value: 1.38 um + units: {} viirs_m10: name: M10 var_name: data attributes: physical_element: raw_value: 1.61 um + units: {} viirs_m11: name: M11 var_name: data attributes: physical_element: raw_value: 2.25 um + units: {} viirs_m12: name: M12 var_name: data attributes: physical_element: raw_value: 3.70 um + units: {} viirs_m13: name: M13 var_name: data attributes: physical_element: raw_value: 4.05 um + units: {} viirs_m14: name: M14 var_name: data attributes: physical_element: raw_value: 8.6 um + units: {} viirs_m15: name: M15 var_name: data attributes: physical_element: raw_value: 10.8 um + units: {} viirs_m16: name: M16 var_name: data attributes: physical_element: raw_value: 12.0 um + units: {} # VIIRS Corrected Reflectance # viirs_viirs_crefl01: @@ -746,36 +779,42 @@ templates: attributes: physical_element: raw_value: 36.5 GHz H + units: {} amsr2_btemp_36.5v: name: btemp_36.5v var_name: data attributes: physical_element: raw_value: 36.5 GHz V + units: {} amsr2_btemp_89.0ah: name: btemp_89.0ah var_name: data attributes: physical_element: raw_value: 89.0 GHz AH + units: {} amsr2_btemp_89.0av: name: 
btemp_89.0av var_name: data attributes: physical_element: raw_value: 89.0 GHz AV + units: {} amsr2_btemp_89.0bh: name: btemp_89.0bh var_name: data attributes: physical_element: raw_value: 89.0 GHz BH + units: {} amsr2_btemp_89.0bv: name: btemp_89.0bv var_name: data attributes: physical_element: raw_value: 89.0 GHz BV + units: {} # GEOCAT Level 1 Products geocat_surface_type: @@ -784,6 +823,7 @@ templates: attributes: physical_element: raw_value: Surface Type + units: {} # GEOCAT Level 2 Products glm_l2_radc: single_variable: false @@ -851,6 +891,10 @@ templates: attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density" + # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" + # but this is not understood by AWIPS + units: + raw_value: "1" standard_name: value: "{standard_name}" long_name: @@ -864,6 +908,10 @@ templates: attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density_Window" + # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" + # but this is not understood by AWIPS + units: + raw_value: "1" standard_name: value: "{standard_name}" long_name: @@ -1024,6 +1072,10 @@ templates: attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density" + # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" + # but this is not understood by AWIPS + units: + raw_value: "1" standard_name: value: "{standard_name}" long_name: @@ -1040,6 +1092,10 @@ templates: attributes: # physical_element: # raw_value: "GLM_Flash_Extent_Density_Window" + # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min" + # but this is not understood by AWIPS + units: + raw_value: "1" standard_name: value: "{standard_name}" long_name: diff --git a/satpy/etc/writers/geotiff.yaml b/satpy/etc/writers/geotiff.yaml index 9c8f2b6b07..e2f16daa9d 100644 --- a/satpy/etc/writers/geotiff.yaml +++ b/satpy/etc/writers/geotiff.yaml @@ -4,4 +4,4 @@ writer: writer: !!python/name:satpy.writers.geotiff.GeoTIFFWriter filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif' compress: DEFLATE - zlevel: 6 \ No newline at end of file + zlevel: 6 diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py index 6748b6a7b4..de71d96147 100644 --- a/satpy/modifiers/_crefl.py +++ b/satpy/modifiers/_crefl.py @@ -72,34 +72,21 @@ def _get_registered_dem_cache_key(self): def __call__(self, datasets, optional_datasets, **info): """Create modified DataArray object by applying the crefl algorithm.""" - from satpy.modifiers._crefl_utils import get_coefficients refl_data, angles = self._extract_angle_data_arrays(datasets, optional_datasets) - coefficients = get_coefficients(refl_data.attrs["sensor"], - refl_data.attrs["wavelength"], - refl_data.attrs["resolution"]) - results = self._call_crefl(refl_data, coefficients, angles) + results = self._call_crefl(refl_data, angles) info.update(refl_data.attrs) info["rayleigh_corrected"] = True results.attrs = info self.apply_modifier_info(refl_data, results) return results - def _call_crefl(self, refl_data, coefficients, angles): + def _call_crefl(self, refl_data, angles): from satpy.modifiers._crefl_utils import run_crefl avg_elevation = self._get_average_elevation() - lons, lats = refl_data.attrs['area'].get_lonlats(chunks=refl_data.chunks) - is_percent = refl_data.attrs["units"] == "%" - use_abi = refl_data.attrs['sensor'] == 'abi' results = run_crefl(refl_data, - coefficients, - lons, - lats, *angles, avg_elevation=avg_elevation, - percent=is_percent, - use_abi=use_abi) - factor = 100. 
if is_percent else 1. - results = results * factor + ) return results def _get_average_elevation(self): diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py index be9c8681f8..ddde8c9765 100644 --- a/satpy/modifiers/_crefl_utils.py +++ b/satpy/modifiers/_crefl_utils.py @@ -17,25 +17,66 @@ # satpy. If not, see . """Shared utilities for correcting reflectance data using the 'crefl' algorithm. -Original code written by Ralph Kuehn with modifications by David Hoese and Martin Raspaud. -Ralph's code was originally based on the C crefl code distributed for VIIRS and MODIS. +The CREFL algorithm in this module is based on the `NASA CREFL SPA`_ software, +the `NASA CVIIRS SPA`_, and customizations of these algorithms for ABI/AHI by +Ralph Kuehn and Min Oo at the Space Science and Engineering Center (SSEC). + +The CREFL SPA documentation page describes the algorithm by saying: + + The CREFL_SPA processes MODIS Aqua and Terra Level 1B DB data to create the + MODIS Level 2 Corrected Reflectance product. The algorithm performs a simple + atmospheric correction with MODIS visible, near-infrared, and short-wave + infrared bands (bands 1 through 16). + + It corrects for molecular (Rayleigh) scattering and gaseous absorption (water + vapor and ozone) using climatological values for gas contents. It requires no + real-time input of ancillary data. The algorithm performs no aerosol + correction. The Corrected Reflectance products created by CREFL_SPA are very + similar to the MODIS Land Surface Reflectance product (MOD09) in clear + atmospheric conditions, since the algorithms used to derive both are based on + the 6S Radiative Transfer Model. The products show differences in the presence + of aerosols, however, because the MODIS Land Surface Reflectance product uses + a more complex atmospheric correction algorithm that includes a correction for + aerosols. + +The additional logic to support ABI (AHI support not included) was originally +written by Ralph Kuehn and Min Oo at SSEC. Additional modifications were +performed by Martin Raspaud, David Hoese, and Will Roberts to make the code +work together and be more dask compatible. + +The AHI/ABI implementation is based on the MODIS collection 6 algorithm, where +a spherical-shell atmosphere was assumed rather than a plane-parallel. See +Appendix A in: "The Collection 6 MODIS aerosol products over land and ocean" +Atmos. Meas. Tech., 6, 2989–3034, 2013 www.atmos-meas-tech.net/6/2989/2013/ +:doi:`10.5194/amt-6-2989-2013`. + + +The original CREFL code is similar to what is described in appendix A1 (page +74) of the ATBD for the `MODIS MOD04/MYD04`_ data product. + +.. _NASA CREFL SPA: https://directreadout.sci.gsfc.nasa.gov/?id=dspContent&cid=92&type=software +.. _NASA CVIIRS SPA: https://directreadout.sci.gsfc.nasa.gov/?id=dspContent&cid=277&type=software +.. 
_MODIS MOD04/MYD04: https://modis.gsfc.nasa.gov/data/atbd/atbd_mod02.pdf + + """ +from __future__ import annotations + import logging +from typing import Optional, Type, Union import dask.array as da import numpy as np import xarray as xr -LOG = logging.getLogger(__name__) +from satpy.dataset.dataid import WavelengthRange -bUseV171 = False +LOG = logging.getLogger(__name__) -if bUseV171: - UO3 = 0.319 - UH2O = 2.93 -else: - UO3 = 0.285 - UH2O = 2.93 +UO3_MODIS = 0.319 +UH2O_MODIS = 2.93 +UO3_VIIRS = 0.285 +UH2O_VIIRS = 2.93 MAXSOLZ = 86.5 MAXAIRMASS = 18 @@ -49,6 +90,446 @@ REFLMAX = 1.6 +class _Coefficients: + LUTS: list[np.ndarray] = [] + # resolution -> wavelength -> coefficient index + # resolution -> band name -> coefficient index + COEFF_INDEX_MAP: dict[int, dict[Union[tuple, str], int]] = {} + + def __init__(self, wavelength_range, resolution=0): + self._wv_range = wavelength_range + self._resolution = resolution + + def __call__(self): + idx = self._find_coefficient_index(self._wv_range, resolution=self._resolution) + band_luts = [lut_array[idx] for lut_array in self.LUTS] + return band_luts + + def _find_coefficient_index(self, wavelength_range, resolution=0): + """Return index into coefficient arrays for this band's wavelength. + + This function searches through the `COEFF_INDEX_MAP` dictionary and + finds the first key where the nominal wavelength of `wavelength_range` + falls between the minimum wavelength and maximum wavelength of the key. + `wavelength_range` can also be the standard name of the band. For + example, "M05" for VIIRS or "1" for MODIS. + + Args: + wavelength_range: 3-element tuple of + (min wavelength, nominal wavelength, max wavelength) or the + string name of the band. + resolution: resolution of the band to be corrected + + Returns: + index into coefficient arrays like `aH2O`, `aO3`, etc.
+ None is returned if no matching wavelength is found + + """ + index_map = self.COEFF_INDEX_MAP + # Find the best resolution of coefficients + for res in sorted(index_map.keys()): + if resolution <= res: + index_map = index_map[res] + break + else: + raise ValueError(f"Unrecognized data resolution: {resolution}") + # Find the best wavelength of coefficients + if isinstance(wavelength_range, str): + # wavelength range is actually a band name + return index_map[wavelength_range] + for lut_wvl_range, v in index_map.items(): + if isinstance(lut_wvl_range, str): + # we are analyzing wavelengths and ignoring dataset names + continue + if wavelength_range[1] in lut_wvl_range: + return v + raise ValueError(f"Can't find LUT for {wavelength_range}.") + + +class _ABICoefficients(_Coefficients): + RG_FUDGE = .55 # This number is what Ralph says "looks good" for ABI/AHI + LUTS = [ + # aH2O + np.array([2.4111e-003, 7.8454e-003 * RG_FUDGE, 7.9258e-3, 9.3392e-003, 2.53e-2]), + # aO2 (bH2O for other instruments) + np.array([1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2]), + # aO3 + np.array([4.2869e-003, 25.6509e-003 * RG_FUDGE, 802.4319e-006, 0.0000e+000, 2e-5]), + # taur0 + np.array([184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 311.2900e-006]), + ] + # resolution -> wavelength -> coefficient index + # resolution -> band name -> coefficient index + COEFF_INDEX_MAP = { + 2000: { + WavelengthRange(0.450, 0.470, 0.490): 0, # C01 + "C01": 0, + WavelengthRange(0.590, 0.640, 0.690): 1, # C02 + "C02": 1, + WavelengthRange(0.8455, 0.865, 0.8845): 2, # C03 + "C03": 2, + # WavelengthRange((1.3705, 1.378, 1.3855)): None, # C04 - No coefficients yet + # "C04": None, + WavelengthRange(1.580, 1.610, 1.640): 3, # C05 + "C05": 3, + WavelengthRange(2.225, 2.250, 2.275): 4, # C06 + "C06": 4 + }, + } + + +class _VIIRSCoefficients(_Coefficients): + # Values from crefl 1.7.1 + LUTS = [ + # aH2O + np.array([0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252, 0.000996563, 0.00222253, 0.00094005, + 0.000563288, 0, 0, 0, 0, 0, 0]), + # bH2O + np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958, 0.78812, 0.791204, 0.900564, 0.942907, 0, 0, + 0, 0, 0, 0]), + # aO3 + np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531, 0, 0, 0.0663, 0.0836, 0.0485, 0.0395, + 0.0119, 0.00263]), + # taur0 + np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132, 0.00033, 0.05373, 0.01561, 0.00129, 0.1131, + 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]), + ] + # resolution -> wavelength -> coefficient index + # resolution -> band name -> coefficient index + COEFF_INDEX_MAP = { + 1000: { + WavelengthRange(0.662, 0.6720, 0.682): 0, # M05 + "M05": 0, + WavelengthRange(0.846, 0.8650, 0.885): 1, # M07 + "M07": 1, + WavelengthRange(0.478, 0.4880, 0.498): 2, # M03 + "M03": 2, + WavelengthRange(0.545, 0.5550, 0.565): 3, # M04 + "M04": 3, + WavelengthRange(1.230, 1.2400, 1.250): 4, # M08 + "M08": 4, + WavelengthRange(1.580, 1.6100, 1.640): 5, # M10 + "M10": 5, + WavelengthRange(2.225, 2.2500, 2.275): 6, # M11 + "M11": 6, + }, + 500: { + WavelengthRange(0.600, 0.6400, 0.680): 7, # I01 + "I01": 7, + WavelengthRange(0.845, 0.8650, 0.884): 8, # I02 + "I02": 8, + WavelengthRange(1.580, 1.6100, 1.640): 9, # I03 + "I03": 9, + }, + } + + +class _MODISCoefficients(_Coefficients): + # Values from crefl 1.7.1 + LUTS = [ + # aH2O + np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0, 0, 0, 0, 0, 0, 0, 0, 0]), + # bH2O + np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342,
0, 0, 0, 0, 0, 0, 0, 0, 0]), + # aO3 + np.array([0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225, 0.0663, + 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]), + # taur0 + np.array([0.05100, 0.01631, 0.19325, 0.09536, 0.00366, 0.00123, 0.00043, 0.3139, 0.2375, 0.1596, 0.1131, + 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]), + ] + # Map of pixel resolutions -> wavelength -> coefficient index + # Map of pixel resolutions -> band name -> coefficient index + COEFF_INDEX_MAP = { + 1000: { + WavelengthRange(0.620, 0.6450, 0.670): 0, + "1": 0, + WavelengthRange(0.841, 0.8585, 0.876): 1, + "2": 1, + WavelengthRange(0.459, 0.4690, 0.479): 2, + "3": 2, + WavelengthRange(0.545, 0.5550, 0.565): 3, + "4": 3, + WavelengthRange(1.230, 1.2400, 1.250): 4, + "5": 4, + WavelengthRange(1.628, 1.6400, 1.652): 5, + "6": 5, + WavelengthRange(2.105, 2.1300, 2.155): 6, + "7": 6, + } + } + COEFF_INDEX_MAP[500] = COEFF_INDEX_MAP[1000] + COEFF_INDEX_MAP[250] = COEFF_INDEX_MAP[1000] + + +def run_crefl(refl, + sensor_azimuth, + sensor_zenith, + solar_azimuth, + solar_zenith, + avg_elevation=None, + ): + """Run main crefl algorithm. + + All input parameters are per-pixel values meaning they are the same size + and shape as the input reflectance data, unless otherwise stated. + + :param refl: reflectance data array to be corrected + :param sensor_azimuth: input swath sensor azimuth angle array + :param sensor_zenith: input swath sensor zenith angle array + :param solar_azimuth: input swath solar azimuth angle array + :param solar_zenith: input swath solar zenith angle array + :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) + + """ + runner_cls = _runner_class_for_sensor(refl.attrs['sensor']) + runner = runner_cls(refl) + corr_refl = runner(sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation) + return corr_refl + + +class _CREFLRunner: + def __init__(self, refl_data_arr): + self._is_percent = refl_data_arr.attrs["units"] == "%" + if self._is_percent: + attrs = refl_data_arr.attrs + refl_data_arr = refl_data_arr / 100.0 + refl_data_arr.attrs = attrs + self._refl = refl_data_arr + + @property + def coeffs_cls(self) -> Type[_Coefficients]: + raise NotImplementedError() + + def __call__(self, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation): + refl = self._refl + height = self._height_from_avg_elevation(avg_elevation) + coeffs_helper = self.coeffs_cls(refl.attrs["wavelength"], refl.attrs["resolution"]) + coeffs = coeffs_helper() + mus = np.cos(np.deg2rad(solar_zenith)) + mus = mus.where(mus >= 0) + muv = np.cos(np.deg2rad(sensor_zenith)) + phi = solar_azimuth - sensor_azimuth + corr_refl = self._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs) + if self._is_percent: + corr_refl = corr_refl * 100.0 + return xr.DataArray(corr_refl, dims=refl.dims, coords=refl.coords, attrs=refl.attrs) + + def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): + raise NotImplementedError() + + def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array: + """Get digital elevation map data for our granule with ocean fill value set to 0.""" + if avg_elevation is None: + LOG.debug("No average elevation information provided in CREFL") + # height = np.zeros(lon.shape, dtype=np.float64) + height = 0.
+ else: + LOG.debug("Using average elevation information provided to CREFL") + lon, lat = self._refl.attrs['area'].get_lonlats(chunks=self._refl.chunks) + height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, + chunks=lon.chunks, dtype=avg_elevation.dtype) + return height + + +class _ABICREFLRunner(_CREFLRunner): + @property + def coeffs_cls(self) -> Type[_Coefficients]: + return _ABICoefficients + + def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): + LOG.debug("Using ABI CREFL algorithm") + return da.map_blocks(_run_crefl_abi, self._refl.data, mus.data, muv.data, phi.data, + solar_zenith.data, sensor_zenith.data, height, *coeffs, + meta=np.ndarray((), dtype=self._refl.dtype), + chunks=self._refl.chunks, dtype=self._refl.dtype, + ) + + +class _VIIRSMODISCREFLRunner(_CREFLRunner): + def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): + return da.map_blocks(_run_crefl, self._refl.data, mus.data, muv.data, phi.data, + height, self._refl.attrs.get("sensor"), *coeffs, + meta=np.ndarray((), dtype=self._refl.dtype), + chunks=self._refl.chunks, dtype=self._refl.dtype, + ) + + +class _VIIRSCREFLRunner(_VIIRSMODISCREFLRunner): + @property + def coeffs_cls(self) -> Type[_Coefficients]: + return _VIIRSCoefficients + + def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): + LOG.debug("Using VIIRS CREFL algorithm") + return super()._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs) + + +class _MODISCREFLRunner(_VIIRSMODISCREFLRunner): + @property + def coeffs_cls(self) -> Type[_Coefficients]: + return _MODISCoefficients + + def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs): + LOG.debug("Using MODIS CREFL algorithm") + return super()._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs) + + +_SENSOR_TO_RUNNER = { + "abi": _ABICREFLRunner, + "viirs": _VIIRSCREFLRunner, + "modis": _MODISCREFLRunner, +} + + +def _runner_class_for_sensor(sensor_name: str) -> Type[_CREFLRunner]: + try: + return _SENSOR_TO_RUNNER[sensor_name] + except KeyError: + raise NotImplementedError(f"Don't know how to apply CREFL to data from sensor {sensor_name}.") + + +def _space_mask_height(lon, lat, avg_elevation): + lat[(lat <= -90) | (lat >= 90)] = np.nan + lon[(lon <= -180) | (lon >= 180)] = np.nan + row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32) + col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32) + space_mask = np.isnan(lon) | np.isnan(lat) + row[space_mask] = 0 + col[space_mask] = 0 + + height = avg_elevation[row, col] + # negative heights aren't allowed, clip to 0 + height[(height < 0.0) | np.isnan(height) | space_mask] = 0.0 + return height + + +def _run_crefl(refl, mus, muv, phi, height, sensor_name, *coeffs): + atm_vars_cls = _VIIRSAtmosphereVariables if sensor_name.lower() == "viirs" else _MODISAtmosphereVariables + atm_vars = atm_vars_cls(mus, muv, phi, height, *coeffs) + sphalb, rhoray, TtotraytH2O, tOG = atm_vars() + return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb) + + +def _run_crefl_abi(refl, mus, muv, phi, solar_zenith, sensor_zenith, height, + *coeffs): + a_O3 = [268.45, 0.5, 115.42, -3.2922] + a_H2O = [0.0311, 0.1, 92.471, -1.3814] + a_O2 = [0.4567, 0.007, 96.4884, -1.6970] + G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3) + G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O) + G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2) + # Note: 
bh2o values are actually ao2 values for abi + atm_vars = _ABIAtmosphereVariables(G_O3, G_H2O, G_O2, + mus, muv, phi, height, *coeffs) + sphalb, rhoray, TtotraytH2O, tOG = atm_vars() + return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb) + + +def _G_calc(zenith, a_coeff): + return (np.cos(np.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1 + + +def _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb): + corr_refl = (refl / tOG - rhoray) / TtotraytH2O + corr_refl /= (1.0 + corr_refl * sphalb) + return corr_refl.clip(REFLMIN, REFLMAX) + + +class _AtmosphereVariables: + def __init__(self, mus, muv, phi, height, ah2o, bh2o, ao3, tau): + self._mus = mus + self._muv = muv + self._phi = phi + self._height = height + self._ah2o = ah2o + self._bh2o = bh2o + self._ao3 = ao3 + self._tau = tau + self._taustep4sphalb = TAUSTEP4SPHALB + + def __call__(self): + tau_step = np.linspace( + self._taustep4sphalb, + MAXNUMSPHALBVALUES * self._taustep4sphalb, + MAXNUMSPHALBVALUES) + sphalb0 = _csalbr(tau_step) + taur = self._tau * np.exp(-self._height / SCALEHEIGHT) + rhoray, trdown, trup = _chand(self._phi, self._muv, self._mus, taur) + sphalb = sphalb0[(taur / self._taustep4sphalb + 0.5).astype(np.int32)] + Ttotrayu = ((2 / 3. + self._muv) + (2 / 3. - self._muv) * trup) / (4 / 3. + taur) + Ttotrayd = ((2 / 3. + self._mus) + (2 / 3. - self._mus) * trdown) / (4 / 3. + taur) + + tH2O = self._get_th2o() + TtotraytH2O = Ttotrayu * Ttotrayd * tH2O + + tO2 = self._get_to2() + tO3 = self._get_to3() + tOG = tO3 * tO2 + return sphalb, rhoray, TtotraytH2O, tOG + + def _get_to2(self): + return 1.0 + + def _get_to3(self): + raise NotImplementedError() + + def _get_th2o(self): + raise NotImplementedError() + + +class _ABIAtmosphereVariables(_AtmosphereVariables): + def __init__(self, G_O3, G_H2O, G_O2, *args): + super().__init__(*args) + self._G_O3 = G_O3 + self._G_H2O = G_H2O + self._G_O2 = G_O2 + self._taustep4sphalb = TAUSTEP4SPHALB_ABI + + def _get_to2(self): + # NOTE: bh2o is actually ao2 for ABI + return np.exp(-self._G_O2 * self._bh2o) + + def _get_to3(self): + return np.exp(-self._G_O3 * self._ao3) if self._ao3 != 0 else 1.0 + + def _get_th2o(self): + return np.exp(-self._G_H2O * self._ah2o) if self._ah2o != 0 else 1.0 + + +class _VIIRSAtmosphereVariables(_AtmosphereVariables): + def __init__(self, *args): + super().__init__(*args) + self._airmass = self._compute_airmass() + + def _compute_airmass(self): + air_mass = 1.0 / self._mus + 1 / self._muv + air_mass[air_mass > MAXAIRMASS] = -1.0 + return air_mass + + def _get_to3(self): + if self._ao3 == 0: + return 1.0 + return np.exp(-self._airmass * UO3_VIIRS * self._ao3) + + def _get_th2o(self): + if self._bh2o == 0: + return 1.0 + return np.exp(-(self._ah2o * ((self._airmass * UH2O_VIIRS) ** self._bh2o))) + + +class _MODISAtmosphereVariables(_VIIRSAtmosphereVariables): + def _get_to3(self): + if self._ao3 == 0: + return 1.0 + return np.exp(-self._airmass * UO3_MODIS * self._ao3) + + def _get_th2o(self): + if self._bh2o == 0: + return 1.0 + return np.exp(-np.exp(self._ah2o + self._bh2o * np.log(self._airmass * UH2O_MODIS))) + + def _csalbr(tau): # Previously 3 functions csalbr fintexp1, fintexp3 a = [-.57721566, 0.99999193, -0.24991055, 0.05519968, -0.00976004, @@ -68,184 +549,6 @@ def _csalbr(tau): (4.0 + 2.0 * tau) + 2.0 * np.exp(-tau)) / (4.0 + 3.0 * tau) -# From crefl.1.7.1 -if bUseV171: - aH2O = np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0, - 0, 0, 0, 0, 0, 0, 0, 0]) - bH2O = 
np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342, 0, - 0, 0, 0, 0, 0, 0, 0, 0]) - # const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, - # 0, 0, 0.00244, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, - # 0.00263};*/ - aO3 = np.array( - [0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225, - 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]) - # const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, - # 0.0036, 0.0012, 0.0004, 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, - # 0.0446, 0.0416, 0.0286, 0.0155};*/ - taur0 = np.array( - [0.05100, 0.01631, 0.19325, 0.09536, 0.00366, 0.00123, 0.00043, 0.3139, - 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]) -else: - # From polar2grid cviirs.c - # This number is what Ralph says "looks good" - rg_fudge = .55 - aH2O = np.array( - [0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252, - 0.000996563, 0.00222253, 0.00094005, 0.000563288, 0, 0, 0, 0, 0, 0, - 2.4111e-003, 7.8454e-003*rg_fudge, 7.9258e-3, 9.3392e-003, 2.53e-2]) - bH2O = np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958, - 0.78812, 0.791204, 0.900564, 0.942907, 0, 0, 0, 0, 0, 0, - # These are actually aO2 values for abi calculations - 1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2]) - # /*const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, 0, 0, 0.00244, - # 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263};*/ - aO3 = np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531, - 0, 0, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263, - 4.2869e-003, 25.6509e-003*rg_fudge, 802.4319e-006, 0.0000e+000, 2e-5]) - # /*const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, 0.0036, 0.0012, 0.0004, - # 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155};*/ - taur0 = np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132, - 0.00033, 0.05373, 0.01561, 0.00129, 0.1131, 0.0994, - 0.0446, 0.0416, 0.0286, 0.0155, - 184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 311.2900e-006]) - # add last 5 from bH2O to aO2 - aO2 = 0 - -# Map of pixel resolutions -> wavelength -> coefficient index -# Map of pixel resolutions -> band name -> coefficient index -# Index is used in aH2O, bH2O, aO3, and taur0 arrays above -MODIS_COEFF_INDEX_MAP = { - 1000: { - (0.620, 0.6450, 0.670): 0, - "1": 0, - (0.841, 0.8585, 0.876): 1, - "2": 1, - (0.459, 0.4690, 0.479): 2, - "3": 2, - (0.545, 0.5550, 0.565): 3, - "4": 3, - (1.230, 1.2400, 1.250): 4, - "5": 4, - (1.628, 1.6400, 1.652): 5, - "6": 5, - (2.105, 2.1300, 2.155): 6, - "7": 6, - } -} -MODIS_COEFF_INDEX_MAP[500] = MODIS_COEFF_INDEX_MAP[1000] -MODIS_COEFF_INDEX_MAP[250] = MODIS_COEFF_INDEX_MAP[1000] - -# resolution -> wavelength -> coefficient index -# resolution -> band name -> coefficient index -VIIRS_COEFF_INDEX_MAP = { - 1000: { - (0.662, 0.6720, 0.682): 0, # M05 - "M05": 0, - (0.846, 0.8650, 0.885): 1, # M07 - "M07": 1, - (0.478, 0.4880, 0.498): 2, # M03 - "M03": 2, - (0.545, 0.5550, 0.565): 3, # M04 - "M04": 3, - (1.230, 1.2400, 1.250): 4, # M08 - "M08": 4, - (1.580, 1.6100, 1.640): 5, # M10 - "M10": 5, - (2.225, 2.2500, 2.275): 6, # M11 - "M11": 6, - }, - 500: { - (0.600, 0.6400, 0.680): 7, # I01 - "I01": 7, - (0.845, 0.8650, 0.884): 8, # I02 - "I02": 8, - (1.580, 1.6100, 1.640): 9, # I03 - "I03": 9, - }, -} - - -# resolution -> wavelength -> coefficient index -# resolution -> band name -> coefficient index -ABI_COEFF_INDEX_MAP = { - 2000: { - (0.450, 0.470, 0.490): 16, # C01 - "C01": 
16, - (0.590, 0.640, 0.690): 17, # C02 - "C02": 17, - (0.8455, 0.865, 0.8845): 18, # C03 - "C03": 18, - # (1.3705, 1.378, 1.3855): None, # C04 - # "C04": None, - (1.580, 1.610, 1.640): 19, # C05 - "C05": 19, - (2.225, 2.250, 2.275): 20, # C06 - "C06": 20 - }, -} - - -COEFF_INDEX_MAP = { - "viirs": VIIRS_COEFF_INDEX_MAP, - "modis": MODIS_COEFF_INDEX_MAP, - "abi": ABI_COEFF_INDEX_MAP, -} - - -def find_coefficient_index(sensor, wavelength_range, resolution=0): - """Return index in to coefficient arrays for this band's wavelength. - - This function search through the `COEFF_INDEX_MAP` dictionary and - finds the first key where the nominal wavelength of `wavelength_range` - falls between the minimum wavelength and maximum wavelength of the key. - `wavelength_range` can also be the standard name of the band. For - example, "M05" for VIIRS or "1" for MODIS. - - :param sensor: sensor of band to be corrected - :param wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength) - :param resolution: resolution of the band to be corrected - :return: index in to coefficient arrays like `aH2O`, `aO3`, etc. - None is returned if no matching wavelength is found - """ - index_map = COEFF_INDEX_MAP[sensor.lower()] - # Find the best resolution of coefficients - for res in sorted(index_map.keys()): - if resolution <= res: - index_map = index_map[res] - break - else: - raise ValueError("Unrecognized data resolution: {}", resolution) - # Find the best wavelength of coefficients - if isinstance(wavelength_range, str): - # wavelength range is actually a band name - return index_map[wavelength_range] - for k, v in index_map.items(): - if isinstance(k, str): - # we are analyzing wavelengths and ignoring dataset names - continue - if k[0] <= wavelength_range[1] <= k[2]: - return v - - -def get_coefficients(sensor, wavelength_range, resolution=0): - """Get coefficients used in CREFL correction. 
- - Args: - sensor: sensor of the band to be corrected - wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength) - resolution: resolution of the band to be corrected - - Returns: - aH2O, bH2O, aO3, taur0 coefficient values - - """ - idx = find_coefficient_index(sensor, - wavelength_range, - resolution=resolution) - return aH2O[idx], bH2O[idx], aO3[idx], taur0[idx] - - def _chand(phi, muv, mus, taur): # FROM FUNCTION CHAND # phi: azimuthal difference between sun and observation in degree @@ -283,9 +586,9 @@ def _chand(phi, muv, mus, taur): # pl[4] = mus * mus * muv * muv fs01 = as0[0] + (mus + muv) * as0[1] + (mus * muv) * as0[2] + ( - mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4] + mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4] fs02 = as0[5] + (mus + muv) * as0[6] + (mus * muv) * as0[7] + ( - mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9] + mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9] # for (i = 0; i < 5; i++) { # fs01 += (double) (pl[i] * as0[i]); # fs02 += (double) (pl[i] * as0[5 + i]); @@ -293,7 +596,7 @@ def _chand(phi, muv, mus, taur): # for refl, (ah2o, bh2o, ao3, tau) in zip(reflectance_bands, coefficients): - # ib = find_coefficient_index(center_wl) + # ib = _find_coefficient_index(center_wl) # if ib is None: # raise ValueError("Can't handle band with wavelength '{}'".format(center_wl)) @@ -322,154 +625,3 @@ def _chand(phi, muv, mus, taur): rhoray = xitot1 * xcos1 + xitot2 * xcos2 * 2.0 + xitot3 * xcos3 * 2.0 return rhoray, trdown, trup - - -def _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, taustep4sphalb, tO2=1.0): - tau_step = np.linspace(taustep4sphalb, MAXNUMSPHALBVALUES * taustep4sphalb, MAXNUMSPHALBVALUES) - sphalb0 = _csalbr(tau_step) - taur = tau * np.exp(-height / SCALEHEIGHT) - rhoray, trdown, trup = _chand(phi, muv, mus, taur) - sphalb = sphalb0[(taur / taustep4sphalb + 0.5).astype(np.int32)] - Ttotrayu = ((2 / 3. + muv) + (2 / 3. - muv) * trup) / (4 / 3. + taur) - Ttotrayd = ((2 / 3. + mus) + (2 / 3. - mus) * trdown) / (4 / 3. + taur) - TtotraytH2O = Ttotrayu * Ttotrayd * tH2O - tOG = tO3 * tO2 - return sphalb, rhoray, TtotraytH2O, tOG - - -def get_atm_variables(mus, muv, phi, height, ah2o, bh2o, ao3, tau): - """Get atmospheric variables for non-ABI instruments.""" - air_mass = 1.0 / mus + 1 / muv - air_mass[air_mass > MAXAIRMASS] = -1.0 - tO3 = 1.0 - tH2O = 1.0 - if ao3 != 0: - tO3 = np.exp(-air_mass * UO3 * ao3) - if bh2o != 0: - if bUseV171: - tH2O = np.exp(-np.exp(ah2o + bh2o * np.log(air_mass * UH2O))) - else: - tH2O = np.exp(-(ah2o * ((air_mass * UH2O) ** bh2o))) - # Returns sphalb, rhoray, TtotraytH2O, tOG - return _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB) - - -def get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, ah2o, ao2, ao3, tau): - """Get atmospheric variables for ABI.""" - tO3 = 1.0 - tH2O = 1.0 - if ao3 != 0: - tO3 = np.exp(-G_O3 * ao3) - if ah2o != 0: - tH2O = np.exp(-G_H2O * ah2o) - tO2 = np.exp(-G_O2 * ao2) - # Returns sphalb, rhoray, TtotraytH2O, tOG. 
- return _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB_ABI, tO2=tO2) - - -def _G_calc(zenith, a_coeff): - return (np.cos(np.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1 - - -def _avg_elevation_index(avg_elevation, row, col): - return avg_elevation[row, col] - - -def run_crefl(refl, coeffs, - lon, - lat, - sensor_azimuth, - sensor_zenith, - solar_azimuth, - solar_zenith, - avg_elevation=None, - percent=False, - use_abi=False): - """Run main crefl algorithm. - - All input parameters are per-pixel values meaning they are the same size - and shape as the input reflectance data, unless otherwise stated. - - :param reflectance_bands: tuple of reflectance band arrays - :param coefficients: tuple of coefficients for each band (see `get_coefficients`) - :param lon: input swath longitude array - :param lat: input swath latitude array - :param sensor_azimuth: input swath sensor azimuth angle array - :param sensor_zenith: input swath sensor zenith angle array - :param solar_azimuth: input swath solar azimuth angle array - :param solar_zenith: input swath solar zenith angle array - :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf) - :param percent: True if input reflectances are on a 0-100 scale instead of 0-1 scale (default: False) - - """ - # FUTURE: Find a way to compute the average elevation before hand - # Get digital elevation map data for our granule, set ocean fill value to 0 - if avg_elevation is None: - LOG.debug("No average elevation information provided in CREFL") - # height = np.zeros(lon.shape, dtype=np.float64) - height = 0. - else: - LOG.debug("Using average elevation information provided to CREFL") - height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation, - chunks=lon.chunks, dtype=avg_elevation.dtype) - mus = np.cos(np.deg2rad(solar_zenith)) - mus = mus.where(mus >= 0) - muv = np.cos(np.deg2rad(sensor_zenith)) - phi = solar_azimuth - sensor_azimuth - - if use_abi: - LOG.debug("Using ABI CREFL algorithm") - corr_refl = da.map_blocks(_run_crefl_abi, refl.data, mus.data, muv.data, phi.data, - solar_zenith, sensor_zenith, height, *coeffs, percent=percent) - else: - LOG.debug("Using original VIIRS CREFL algorithm") - corr_refl = da.map_blocks(_run_crefl, refl.data, mus.data, muv.data, phi.data, - height, *coeffs, chunks=refl.chunks, dtype=refl.dtype, - percent=percent) - return xr.DataArray(corr_refl, dims=refl.dims, coords=refl.coords, attrs=refl.attrs) - - -def _space_mask_height(lon, lat, avg_elevation): - lat[(lat <= -90) | (lat >= 90)] = np.nan - lon[(lon <= -180) | (lon >= 180)] = np.nan - row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32) - col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32) - space_mask = np.isnan(lon) | np.isnan(lat) - row[space_mask] = 0 - col[space_mask] = 0 - - height = avg_elevation[row, col] - # negative heights aren't allowed, clip to 0 - height[(height < 0.0) | np.isnan(height) | space_mask] = 0.0 - return height - - -def _run_crefl(refl, mus, muv, phi, height, *coeffs, percent=True, computing_meta=False): - if computing_meta: - return refl - sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables(mus, muv, phi, height, *coeffs) - return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb, percent) - - -def _run_crefl_abi(refl, mus, muv, phi, solar_zenith, sensor_zenith, height, - *coeffs, percent=True, computing_meta=False): - if computing_meta: - return refl - a_O3 = [268.45, 0.5, 115.42, 
-3.2922] - a_H2O = [0.0311, 0.1, 92.471, -1.3814] - a_O2 = [0.4567, 0.007, 96.4884, -1.6970] - G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3) - G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O) - G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2) - # Note: bh2o values are actually ao2 values for abi - sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, *coeffs) - return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb, percent) - - -def _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb, percent): - if percent: - corr_refl = ((refl / 100.) / tOG - rhoray) / TtotraytH2O - else: - corr_refl = (refl / tOG - rhoray) / TtotraytH2O - corr_refl /= (1.0 + corr_refl * sphalb) - return corr_refl.clip(REFLMIN, REFLMAX) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 22627c7bf5..34b2f9cf2d 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -21,29 +21,33 @@ import hashlib import os import shutil +import warnings from datetime import datetime from functools import update_wrapper from glob import glob -from typing import Any, Callable, Optional, Union, cast +from typing import Any, Callable, Optional, Union import dask -import dask.array as da import numpy as np import xarray as xr -from pyorbital.astronomy import get_alt_az, sun_zenith_angle +from dask import array as da +from pyorbital.astronomy import cos_zen as pyob_cos_zen +from pyorbital.astronomy import get_alt_az from pyorbital.orbital import get_observer_look -from pyresample.geometry import AreaDefinition, SwathDefinition +from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition import satpy -from satpy.utils import get_satpos, ignore_invalid_float_warnings +from satpy.utils import PerformanceWarning, get_satpos, ignore_invalid_float_warnings -PRGeometry = Union[SwathDefinition, AreaDefinition] +PRGeometry = Union[SwathDefinition, AreaDefinition, StackedAreaDefinition] # Arbitrary time used when computing sensor angles that is passed to # pyorbital's get_observer_look function. # The difference is on the order of 1e-10 at most as time changes so we force # it to a single time for easier caching. It is *only* used if caching. STATIC_EARTH_INERTIAL_DATETIME = datetime(2000, 1, 1, 12, 0, 0) +DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array) +HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition) class ZarrCacheHelper: @@ -59,6 +63,13 @@ class ZarrCacheHelper: The cache value to use is purely based on the hash value of all of the provided arguments along with the "cache version" (see below). + Note that the zarr format requires regular chunking of data. That is, + chunks must be all the same size per dimension except for the last chunk. + To work around this limitation, this class will determine a good regular + chunking based on the existing chunking scheme, rechunk the input + arguments, and then rechunk the results before returning them to the user. + This rechunking is only done if caching is enabled. + Args: func: Function that will be called to generate the value to cache. 
cache_config_key: Name of the boolean ``satpy.config`` parameter to @@ -99,7 +110,7 @@ def generate_my_stuff(area_def: AreaDefinition, some_factor: int) -> da.Array: def __init__(self, func: Callable, cache_config_key: str, - uncacheable_arg_types=(SwathDefinition, xr.DataArray, da.Array), + uncacheable_arg_types=DEFAULT_UNCACHE_TYPES, sanitize_args_func: Callable = None, cache_version: int = 1, ): @@ -115,16 +126,10 @@ def cache_clear(self, cache_dir: Optional[str] = None): Intended to mimic the :func:`functools.cache` behavior. """ - if cache_dir is None: - cache_dir = satpy.config.get("cache_dir") - if cache_dir is None: - raise RuntimeError("No 'cache_dir' configured.") + cache_dir = self._get_cache_dir_from_config(cache_dir) zarr_pattern = self._zarr_pattern("*", cache_version="*").format("*") for zarr_dir in glob(os.path.join(cache_dir, zarr_pattern)): - try: - shutil.rmtree(zarr_dir) - except OSError: - continue + shutil.rmtree(zarr_dir, ignore_errors=True) def _zarr_pattern(self, arg_hash, cache_version: Union[int, str] = None) -> str: if cache_version is None: @@ -134,37 +139,54 @@ def _zarr_pattern(self, arg_hash, cache_version: Union[int, str] = None) -> str: def __call__(self, *args, cache_dir: Optional[str] = None) -> Any: """Call the decorated function.""" new_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args - arg_hash = _hash_args(*new_args) + arg_hash = _hash_args(*new_args, unhashable_types=self._uncacheable_arg_types) should_cache, cache_dir = self._get_should_cache_and_cache_dir(new_args, cache_dir) zarr_fn = self._zarr_pattern(arg_hash) zarr_format = os.path.join(cache_dir, zarr_fn) zarr_paths = glob(zarr_format.format("*")) if not should_cache or not zarr_paths: # use sanitized arguments if we are caching, otherwise use original arguments - args = new_args if should_cache else args - res = self._func(*args) + args_to_use = new_args if should_cache else args + res = self._func(*args_to_use) if should_cache and not zarr_paths: + self._warn_if_irregular_input_chunks(args, args_to_use) self._cache_results(res, zarr_format) # if we did any caching, let's load from the zarr files if should_cache: # re-calculate the cached paths - zarr_paths = glob(zarr_format.format("*")) + zarr_paths = sorted(glob(zarr_format.format("*"))) if not zarr_paths: raise RuntimeError("Data was cached to disk but no files were found") - res = tuple(da.from_zarr(zarr_path) for zarr_path in zarr_paths) + new_chunks = _get_output_chunks_from_func_arguments(args) + res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths) return res def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]: should_cache: bool = satpy.config.get(self._cache_config_key, False) can_cache = not any(isinstance(arg, self._uncacheable_arg_types) for arg in args) should_cache = should_cache and can_cache - if cache_dir is None: - cache_dir = satpy.config.get("cache_dir") - if cache_dir is None: - should_cache = False - cache_dir = cast(str, cache_dir) + cache_dir = self._get_cache_dir_from_config(cache_dir) return should_cache, cache_dir + @staticmethod + def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str: + cache_dir = cache_dir or satpy.config.get("cache_dir") + if cache_dir is None: + raise RuntimeError("Can't use zarr caching. 
No 'cache_dir' configured.") + return cache_dir + + @staticmethod + def _warn_if_irregular_input_chunks(args, modified_args): + arg_chunks = _get_output_chunks_from_func_arguments(args) + new_chunks = _get_output_chunks_from_func_arguments(modified_args) + if _chunks_are_irregular(arg_chunks): + warnings.warn( + "Calling cached function with irregular dask chunks. The data " + "has been rechunked for caching, but this is not optimal for " + "future calculations. " + f"Original chunks: {arg_chunks}; New chunks: {new_chunks}", + PerformanceWarning) + def _cache_results(self, res, zarr_format): os.makedirs(os.path.dirname(zarr_format), exist_ok=True) new_res = [] @@ -175,17 +197,31 @@ def _cache_results(self, res, zarr_format): zarr_path = zarr_format.format(idx) # See https://github.com/dask/dask/issues/8380 with dask.config.set({"optimization.fuse.active": False}): - new_sub_res = sub_res.to_zarr(zarr_path, - return_stored=True, - compute=False) + new_sub_res = sub_res.to_zarr(zarr_path, compute=False) new_res.append(new_sub_res) # actually compute the storage to zarr da.compute(new_res) +def _get_output_chunks_from_func_arguments(args): + """Determine what the desired output chunks are. + + It is assumed that a tuple of tuples of integers defines the chunk sizes. + If no such tuple is found, the arguments are checked for array-like + objects with a ``.chunks`` attribute. + + """ + chunked_args = [arg for arg in args if hasattr(arg, "chunks")] + tuple_args = [arg for arg in args if _is_chunk_tuple(arg)] + if not tuple_args and not chunked_args: + raise RuntimeError("Cannot determine desired output chunksize for cached function.") + new_chunks = tuple_args[-1] if tuple_args else chunked_args[0].chunks + return new_chunks + + def cache_to_zarr_if( cache_config_key: str, - uncacheable_arg_types=(SwathDefinition, xr.DataArray, da.Array), + uncacheable_arg_types=DEFAULT_UNCACHE_TYPES, sanitize_args_func: Callable = None, ) -> Callable: """Decorate a function and cache the results as a zarr array on disk.
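For orientation, here is a minimal sketch of how this caching decorator is meant to be used. The function name, config key, and cache directory are hypothetical stand-ins (the decorated functions in this module are ``_get_valid_lonlats`` and ``_get_sensor_angles_from_sat_pos``), and it assumes ``satpy.config`` accepts ad-hoc keys via ``set()``::

    import dask.array as da

    import satpy
    from satpy.modifiers.angles import cache_to_zarr_if

    @cache_to_zarr_if("cache_my_result")  # hypothetical boolean config key
    def _expensive_computation(some_id: int, chunks: tuple) -> tuple:
        # Stand-in for a costly dask computation; the helper expects a
        # tuple of dask arrays so each one can be stored as its own zarr.
        arr = da.zeros((4, 4), chunks=chunks)
        return (arr,)

    with satpy.config.set(cache_my_result=True, cache_dir="/tmp/satpy_zarr_cache"):
        (res,) = _expensive_computation(42, ((2, 2), (2, 2)))  # computed, then written to zarr
        (res,) = _expensive_computation(42, ((2, 2), (2, 2)))  # same hash: loaded from zarr

The hashable arguments (the integer and the chunks tuple here) make up the cache key, so calls with different arguments get their own zarr stores.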
@@ -207,18 +243,18 @@ def _decorator(func: Callable) -> Callable: return _decorator -def _hash_args(*args): +def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES): import json hashable_args = [] for arg in args: - if isinstance(arg, (xr.DataArray, da.Array, SwathDefinition)): + if isinstance(arg, unhashable_types): continue - if isinstance(arg, AreaDefinition): + if isinstance(arg, HASHABLE_GEOMETRIES): arg = hash(arg) elif isinstance(arg, datetime): arg = arg.isoformat(" ") hashable_args.append(arg) - arg_hash = hashlib.sha1() + arg_hash = hashlib.sha1() # nosec arg_hash.update(json.dumps(tuple(hashable_args)).encode('utf8')) return arg_hash.hexdigest() @@ -231,11 +267,52 @@ def _sanitize_observer_look_args(*args): elif isinstance(arg, (float, np.float64, np.float32)): # round floating point numbers to nearest tenth new_args.append(round(arg, 1)) + elif _is_chunk_tuple(arg) and _chunks_are_irregular(arg): + new_chunks = _regular_chunks_from_irregular_chunks(arg) + new_args.append(new_chunks) else: new_args.append(arg) return new_args +def _sanitize_args_with_chunks(*args): + new_args = [] + for arg in args: + if _is_chunk_tuple(arg) and _chunks_are_irregular(arg): + new_chunks = _regular_chunks_from_irregular_chunks(arg) + new_args.append(new_chunks) + else: + new_args.append(arg) + return new_args + + +def _is_chunk_tuple(some_obj: Any) -> bool: + if not isinstance(some_obj, tuple): + return False + if not all(isinstance(sub_obj, tuple) for sub_obj in some_obj): + return False + sub_elements = [sub_obj_elem for sub_obj in some_obj for sub_obj_elem in sub_obj] + return all(isinstance(sub_obj_elem, int) for sub_obj_elem in sub_elements) + + +def _regular_chunks_from_irregular_chunks( + old_chunks: tuple[tuple[int, ...], ...] +) -> tuple[tuple[int, ...], ...]: + shape = tuple(sum(dim_chunks) for dim_chunks in old_chunks) + new_dim_chunks = tuple(max(dim_chunks) for dim_chunks in old_chunks) + return da.core.normalize_chunks(new_dim_chunks, shape=shape) + + +def _chunks_are_irregular(chunks_tuple: tuple) -> bool: + """Determine if an array is irregularly chunked. + + Zarr does not support saving data in irregular chunks. Regular chunking + is when all chunks are the same size (except for the last one). + + """ + return any(len(set(chunks[:-1])) > 1 for chunks in chunks_tuple) + + def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray: return xr.DataArray(arr, dims=('y', 'x')) @@ -250,13 +327,17 @@ def get_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray, xr.D Args: data_arr: DataArray to get angles for. Information extracted from this - object are ``.attrs["area"]`` and ``.attrs["start_time"]``. + object are ``.attrs["area"]``,``.attrs["start_time"]``, and + ``.attrs["orbital_parameters"]``. See :func:`satpy.utils.get_satpos` + and :ref:`dataset_metadata` for more information. Additionally, the dask array chunk size is used when generating new arrays. The actual data of the object is not used. Returns: Four DataArrays representing sensor azimuth angle, sensor zenith angle, - solar azimuth angle, and solar zenith angle. + solar azimuth angle, and solar zenith angle. All values are in degrees. + Sensor angles are provided in the [0, 360] degree range. + Solar angles are provided in the [-180, 180] degree range. 
""" sata, satz = _get_sensor_angles(data_arr) @@ -270,15 +351,28 @@ def get_satellite_zenith_angle(data_arr: xr.DataArray) -> xr.DataArray: Note that this function can benefit from the ``satpy.config`` parameters :ref:`cache_lonlats ` and :ref:`cache_sensor_angles ` - being set to ``True``. + being set to ``True``. Values are in degrees. """ satz = _get_sensor_angles(data_arr)[1] return satz -@cache_to_zarr_if("cache_lonlats") -def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str] = "auto") -> tuple[da.Array, da.Array]: +def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray: + """Generate the cosine of the solar zenith angle for the provided data. + + Returns: + DataArray with the same shape as ``data_arr``. + + """ + chunks = _geo_chunks_from_data_arr(data_arr) + lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) + cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) + return _geo_dask_to_data_array(cos_sza) + + +@cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) +def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str, tuple] = "auto") -> tuple[da.Array, da.Array]: with ignore_invalid_float_warnings(): lons, lats = area.get_lonlats(chunks=chunks) lons = da.where(lons >= 1e30, np.nan, lons) @@ -287,45 +381,77 @@ def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str] = "auto") -> tu def _get_sun_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: - lons, lats = _get_valid_lonlats(data_arr.attrs["area"], data_arr.data.chunks) - res = da.map_blocks(_get_sun_angles_wrapper, lons, lats, - data_arr.attrs["start_time"], - dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), - new_axis=[0], chunks=(2,) + lons.chunks) - suna = _geo_dask_to_data_array(res[0]) - sunz = _geo_dask_to_data_array(res[1]) + chunks = _geo_chunks_from_data_arr(data_arr) + lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks) + suna = da.map_blocks(_get_sun_azimuth_ndarray, lons, lats, + data_arr.attrs["start_time"], + dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), + chunks=lons.chunks) + cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats) + sunz = np.rad2deg(np.arccos(cos_sza)) + suna = _geo_dask_to_data_array(suna) + sunz = _geo_dask_to_data_array(sunz) return suna, sunz -def _get_sun_angles_wrapper(lons: da.Array, lats: da.Array, start_time: datetime) -> tuple[da.Array, da.Array]: +def _get_cos_sza(utc_time, lons, lats): + cos_sza = da.map_blocks(_cos_zen_ndarray, + lons, lats, utc_time, + meta=np.array((), dtype=lons.dtype), + dtype=lons.dtype, + chunks=lons.chunks) + return cos_sza + + +def _cos_zen_ndarray(lons, lats, utc_time): + with ignore_invalid_float_warnings(): + return pyob_cos_zen(utc_time, lons, lats) + + +def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: datetime) -> np.ndarray: with ignore_invalid_float_warnings(): suna = get_alt_az(start_time, lons, lats)[1] suna = np.rad2deg(suna) - sunz = sun_zenith_angle(start_time, lons, lats) - return np.stack([suna, sunz]) + return suna def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]: - sat_lon, sat_lat, sat_alt = get_satpos(data_arr) + preference = satpy.config.get('sensor_angles_position_preference', 'actual') + sat_lon, sat_lat, sat_alt = get_satpos(data_arr, preference=preference) area_def = data_arr.attrs["area"] + chunks = _geo_chunks_from_data_arr(data_arr) sata, satz = _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, data_arr.attrs["start_time"], - 
area_def, data_arr.data.chunks) + area_def, chunks) sata = _geo_dask_to_data_array(sata) satz = _geo_dask_to_data_array(satz) return sata, satz +def _geo_chunks_from_data_arr(data_arr: xr.DataArray) -> tuple: + x_dim_index = _dim_index_with_default(data_arr.dims, "x", -1) + y_dim_index = _dim_index_with_default(data_arr.dims, "y", -2) + chunks = (data_arr.chunks[y_dim_index], data_arr.chunks[x_dim_index]) + return chunks + + +def _dim_index_with_default(dims: tuple, dim_name: str, default: int) -> int: + try: + return dims.index(dim_name) + except ValueError: + return default + + @cache_to_zarr_if("cache_sensor_angles", sanitize_args_func=_sanitize_observer_look_args) def _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, start_time, area_def, chunks): lons, lats = _get_valid_lonlats(area_def, chunks) - res = da.map_blocks(_get_sensor_angles_wrapper, lons, lats, start_time, sat_lon, sat_lat, sat_alt, + res = da.map_blocks(_get_sensor_angles_ndarray, lons, lats, start_time, sat_lon, sat_lat, sat_alt, dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), new_axis=[0], chunks=(2,) + lons.chunks) return res[0], res[1] -def _get_sensor_angles_wrapper(lons, lats, start_time, sat_lon, sat_lat, sat_alt): +def _get_sensor_angles_ndarray(lons, lats, start_time, sat_lon, sat_lat, sat_alt) -> np.ndarray: with ignore_invalid_float_warnings(): sata, satel = get_observer_look( sat_lon, @@ -335,3 +461,51 @@ def _get_sensor_angles_wrapper(lons, lats, start_time, sat_lon, sat_lat, sat_alt lons, lats, 0) satz = 90 - satel return np.stack([sata, satz]) + + +def sunzen_corr_cos(data: da.Array, + cos_zen: da.Array, + limit: float = 88., + max_sza: Optional[float] = 95.) -> da.Array: + """Perform Sun zenith angle correction. + + The correction is based on the provided cosine of the zenith + angle (``cos_zen``). The correction is limited + to ``limit`` degrees (default: 88.0 degrees). For larger zenith + angles, the correction is the same as at the ``limit`` if ``max_sza`` + is `None`. The default behavior is to gradually reduce the correction + past ``limit`` degrees up to ``max_sza`` where the correction becomes + 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. + + """ + return da.map_blocks(_sunzen_corr_cos_ndarray, + data, cos_zen, limit, max_sza, + meta=np.array((), dtype=data.dtype), + chunks=data.chunks) + + +def _sunzen_corr_cos_ndarray(data: np.ndarray, + cos_zen: np.ndarray, + limit: float, + max_sza: Optional[float]) -> np.ndarray: + # Convert the zenith angle limit to cosine of zenith angle + limit_rad = np.deg2rad(limit) + limit_cos = np.cos(limit_rad) + max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza + + # Cosine correction + corr = 1. / cos_zen + if max_sza is not None: + # gradually fall off for larger zenith angle + grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) + # invert the factor so maximum correction is done at `limit` and falls off later + grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) + # make sure we don't make anything negative + grad_factor = grad_factor.clip(0.) + else: + # Use constant value (the limit) for larger zenith angles + grad_factor = 1. 
+ corr = np.where(cos_zen > limit_cos, corr, grad_factor / limit_cos) + # Force "night" pixels to 0 (where SZA is invalid) + corr[np.isnan(cos_zen)] = 0 + return data * corr diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index 431b303f40..30d16cb715 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -18,7 +18,6 @@ """Modifiers related to atmospheric corrections or adjustments.""" import logging -from weakref import WeakValueDictionary import dask.array as da import numpy as np @@ -34,8 +33,6 @@ class PSPRayleighReflectance(ModifierBase): """Pyspectral-based rayleigh corrector for visible channels.""" - _rayleigh_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary() - def __call__(self, projectables, optional_datasets=None, **info): """Get the corrected reflectance when removing Rayleigh scattering. @@ -45,11 +42,9 @@ def __call__(self, projectables, optional_datasets=None, **info): if not optional_datasets or len(optional_datasets) != 4: vis, red = self.match_data_arrays(projectables) sata, satz, suna, sunz = get_angles(vis) - red.data = da.rechunk(red.data, vis.data.chunks) else: vis, red, sata, satz, suna, sunz = self.match_data_arrays( projectables + optional_datasets) - sata, satz, suna, sunz = optional_datasets # get the dask array underneath sata = sata.data @@ -66,18 +61,12 @@ def __call__(self, projectables, optional_datasets=None, **info): atmosphere = self.attrs.get('atmosphere', 'us-standard') aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol') - rayleigh_key = (vis.attrs['platform_name'], - vis.attrs['sensor'], atmosphere, aerosol_type) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", atmosphere, aerosol_type, vis.attrs['name']) - if rayleigh_key not in self._rayleigh_cache: - corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], - atmosphere=atmosphere, - aerosol_type=aerosol_type) - self._rayleigh_cache[rayleigh_key] = corrector - else: - corrector = self._rayleigh_cache[rayleigh_key] + corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'], + atmosphere=atmosphere, + aerosol_type=aerosol_type) try: refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py index 0913fb42d1..ecd83f80e5 100644 --- a/satpy/modifiers/geometry.py +++ b/satpy/modifiers/geometry.py @@ -17,17 +17,15 @@ # satpy. If not, see . """Modifier classes for corrections based on sun and other angles.""" +from __future__ import annotations + import logging -import time -from datetime import datetime -from typing import Optional -from weakref import WeakValueDictionary import numpy as np -import xarray as xr from satpy.modifiers import ModifierBase -from satpy.utils import atmospheric_path_length_correction, sunzen_corr_cos +from satpy.modifiers.angles import sunzen_corr_cos +from satpy.utils import atmospheric_path_length_correction logger = logging.getLogger(__name__) @@ -35,8 +33,6 @@ class SunZenithCorrectorBase(ModifierBase): """Base class for sun zenith correction modifiers.""" - coszen_cache: "WeakValueDictionary[tuple[datetime, str], Optional[xr.DataArray]]" = WeakValueDictionary() - def __init__(self, max_sza=95.0, **kwargs): """Collect custom configuration values. 
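To make the relocated sun zenith correction concrete, here is a small self-contained sketch of :func:`satpy.modifiers.angles.sunzen_corr_cos` as defined above; the sample angles are made up for illustration::

    import dask.array as da
    import numpy as np

    from satpy.modifiers.angles import sunzen_corr_cos

    sza = da.from_array(np.array([[30.0, 80.0], [90.0, 100.0]]), chunks=2)
    data = da.ones_like(sza)
    corrected = sunzen_corr_cos(data, np.cos(np.deg2rad(sza)), limit=88.0, max_sza=95.0)
    # ~1.15 at 30 degrees (plain 1/cos(SZA)), a tapered correction between
    # the 88 degree limit and max_sza, and 0 beyond max_sza (95 degrees)
    print(corrected.compute())

This mirrors what ``SunZenithCorrector`` does after this change: cos(SZA) is generated lazily with ``get_cos_sza`` and the correction runs through ``da.map_blocks`` instead of being cached in a ``WeakValueDictionary``.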
@@ -54,38 +50,24 @@ def __call__(self, projectables, **info): projectables = self.match_data_arrays(list(projectables) + list(info.get('optional_datasets', []))) vis = projectables[0] if vis.attrs.get("sunz_corrected"): - logger.debug("Sun zen correction already applied") + logger.debug("Sun zenith correction already applied") return vis - area_name = hash(vis.attrs['area']) - key = (vis.attrs["start_time"], area_name) - tic = time.time() logger.debug("Applying sun zen correction") - coszen = self.coszen_cache.get(key) - if coszen is None and not info.get('optional_datasets'): - # we were not given SZA, generate SZA then calculate cos(SZA) - from pyorbital.astronomy import cos_zen + if not info.get('optional_datasets'): + # we were not given SZA, generate cos(SZA) logger.debug("Computing sun zenith angles.") - lons, lats = vis.attrs["area"].get_lonlats(chunks=vis.data.chunks) - - coords = {} - if 'y' in vis.coords and 'x' in vis.coords: - coords['y'] = vis['y'] - coords['x'] = vis['x'] - coszen = xr.DataArray(cos_zen(vis.attrs["start_time"], lons, lats), - dims=['y', 'x'], coords=coords) + from .angles import get_cos_sza + coszen = get_cos_sza(vis) if self.max_sza is not None: coszen = coszen.where(coszen >= self.max_sza_cos) - self.coszen_cache[key] = coszen - elif coszen is None: + else: # we were given the SZA, calculate the cos(SZA) coszen = np.cos(np.deg2rad(projectables[1])) - self.coszen_cache[key] = coszen proj = self._apply_correction(vis, coszen) proj.attrs = vis.attrs.copy() self.apply_modifier_info(vis, proj) - logger.debug("Sun-zenith correction applied. Computation time: %5.1f (sec)", time.time() - tic) return proj def _apply_correction(self, proj, coszen): @@ -107,7 +89,7 @@ class SunZenithCorrector(SunZenithCorrectorBase): .. code-block:: yaml sunz_corrected: - compositor: !!python/name:satpy.composites.SunZenithCorrector + modifier: !!python/name:satpy.modifiers.SunZenithCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle @@ -130,7 +112,9 @@ def __init__(self, correction_limit=88., **kwargs): def _apply_correction(self, proj, coszen): logger.debug("Apply the standard sun-zenith correction [1/cos(sunz)]") - return sunzen_corr_cos(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza) + res = proj.copy() + res.data = sunzen_corr_cos(proj.data, coszen.data, limit=self.correction_limit, max_sza=self.max_sza) + return res class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): @@ -151,7 +135,7 @@ class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase): .. code-block:: yaml effective_solar_pathlength_corrected: - compositor: !!python/name:satpy.composites.EffectiveSolarPathLengthCorrector + modifier: !!python/name:satpy.modifiers.EffectiveSolarPathLengthCorrector max_sza: !!null optional_prerequisites: - solar_zenith_angle diff --git a/satpy/multiscene.py b/satpy/multiscene.py index 75a50161b3..752b945834 100644 --- a/satpy/multiscene.py +++ b/satpy/multiscene.py @@ -69,29 +69,72 @@ def timeseries(datasets): return res -def add_group_aliases(scenes, groups): - """Add aliases for the groups datasets belong to.""" +def group_datasets_in_scenes(scenes, groups): + """Group different datasets in multiple scenes by adding aliases. + + Args: + scenes (iterable): Scenes to be processed. + groups (dict): Groups of datasets that shall be treated equally by + MultiScene. Keys specify the groups, values specify the dataset + names to be grouped. 
For example:: + + from satpy import DataQuery + groups = {DataQuery(name='odd'): ['ds1', 'ds3'], + DataQuery(name='even'): ['ds2', 'ds4']} + """ for scene in scenes: - scene = scene.copy() - for group_id, member_names in groups.items(): - # Find out whether one of the datasets in this scene belongs - # to this group - member_ids = [scene[name].attrs['_satpy_id'] - for name in member_names if name in scene] - - # Add an alias for the group it belongs to - if len(member_ids) == 1: - member_id = member_ids[0] - new_ds = scene[member_id].copy() - new_ds.attrs.update(group_id.to_dict()) - scene[group_id] = new_ds - elif len(member_ids) > 1: - raise ValueError('Cannot add multiple datasets from the same ' - 'scene to a group') - else: - # Datasets in this scene don't belong to any group - pass - yield scene + grp = GroupAliasGenerator(scene, groups) + yield grp.duplicate_datasets_with_group_alias() + + +class GroupAliasGenerator: + """Add group aliases to a scene.""" + + def __init__(self, scene, groups): + """Initialize the alias generator.""" + self.scene = scene.copy() + self.groups = groups + + def duplicate_datasets_with_group_alias(self): + """Duplicate datasets to be grouped with a group alias.""" + for group_id, group_members in self.groups.items(): + self._duplicate_dataset_with_group_alias(group_id, group_members) + return self.scene + + def _duplicate_dataset_with_group_alias(self, group_id, group_members): + member_ids = self._get_dataset_id_of_group_members_in_scene(group_members) + if len(member_ids) == 1: + self._duplicate_dataset_with_different_id( + dataset_id=member_ids[0], + alias_id=group_id, + ) + elif len(member_ids) > 1: + raise ValueError('Cannot add multiple datasets from a scene ' + 'to the same group') + + def _get_dataset_id_of_group_members_in_scene(self, group_members): + return [ + self.scene[member].attrs['_satpy_id'] + for member in group_members if member in self.scene + ] + + def _duplicate_dataset_with_different_id(self, dataset_id, alias_id): + dataset = self.scene[dataset_id].copy() + self._prepare_dataset_for_duplication(dataset, alias_id) + self.scene[alias_id] = dataset + + def _prepare_dataset_for_duplication(self, dataset, alias_id): + # Drop all identifier attributes from the original dataset. Otherwise + # they might invalidate the dataset ID of the alias. + self._drop_id_attrs(dataset) + dataset.attrs.update(alias_id.to_dict()) + + def _drop_id_attrs(self, dataset): + for drop_key in self._get_id_attrs(dataset): + dataset.attrs.pop(drop_key) + + def _get_id_attrs(self, dataset): + return dataset.attrs["_satpy_id"].to_dict().keys() class _SceneGenerator(object): @@ -338,7 +381,7 @@ def group(self, groups): DataQuery('my_group', wavelength=(10, 11, 12)): ['IR_108', 'B13', 'C13'] } """ - self._scenes = add_group_aliases(self._scenes, groups) + self._scenes = group_datasets_in_scenes(self._scenes, groups) def _distribute_save_datasets(self, scenes_iter, client, batch_size=1, **kwargs): """Distribute save_datasets across a cluster.""" diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 95c73b426a..dd2f9886ae 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -20,7 +20,7 @@ import logging import os -import pickle +import pickle # nosec B403 import warnings from datetime import datetime, timedelta from functools import total_ordering @@ -647,7 +647,10 @@ def _get_reader_kwargs(reader, reader_kwargs): class FSFile(os.PathLike): """Implementation of a PathLike file object, that can be opened. 
- This is made to be used in conjuction with fsspec or s3fs. For example::
+ Giving the filenames to :class:`Scene` with valid transfer protocols will automatically
+ use this class, so manual usage of it is mainly needed for fine-grained control.
+
+ This class is made to be used in conjunction with fsspec or s3fs. For example::
 from satpy import Scene
@@ -695,15 +698,15 @@ def __repr__(self):
 """Representation of the object."""
 return ''
- def open(self):
+ def open(self, *args, **kwargs):
 """Open the file.
 This is read-only.
 """
 try:
- return self._fs.open(self._file)
+ return self._fs.open(self._file, *args, **kwargs)
 except AttributeError:
- return open(self._file)
+ return open(self._file, *args, **kwargs)
 def __lt__(self, other):
 """Implement ordering.
@@ -739,7 +742,7 @@ def __hash__(self):
 try:
 fshash = hash(self._fs)
 except TypeError: # fsspec < 0.8.8 for CachingFileSystem
- fshash = hash(pickle.dumps(self._fs))
+ fshash = hash(pickle.dumps(self._fs)) # nosec B403
 return hash(self._file) ^ fshash
diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py index e74bed1518..aff1109ff5 100644 --- a/satpy/readers/aapp_l1b.py +++ b/satpy/readers/aapp_l1b.py @@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-# Copyright (c) 2012-2020 Satpy developers
+# Copyright (c) 2012-2021 Satpy developers
 #
 # This file is part of satpy.
 #
@@ -33,28 +33,40 @@
 import xarray as xr
 from dask import delayed
-from satpy import CHUNK_SIZE
 from satpy.readers.file_handlers import BaseFileHandler
+from satpy.utils import get_chunk_size_limit
+
+CHANNEL_DTYPE = np.float64
+
+
+def get_avhrr_lac_chunks(shape, dtype):
+ """Get chunks from a given shape adapted for full-resolution AVHRR data."""
+ limit = get_chunk_size_limit(dtype)
+ return da.core.normalize_chunks(("auto", 2048), shape=shape, limit=limit, dtype=dtype)
+
+
+def get_aapp_chunks(shape):
+ """Get chunks from a given shape adapted for AAPP data."""
+ return get_avhrr_lac_chunks(shape, dtype=CHANNEL_DTYPE)
-LINE_CHUNK = CHUNK_SIZE ** 2 // 2048
 logger = logging.getLogger(__name__)
-CHANNEL_NAMES = ['1', '2', '3a', '3b', '4', '5']
+AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"]
-ANGLES = ['sensor_zenith_angle', - 'solar_zenith_angle', - 'sun_sensor_azimuth_difference_angle']
+AVHRR_ANGLE_NAMES = ['sensor_zenith_angle', + 'solar_zenith_angle', + 'sun_sensor_azimuth_difference_angle']
-PLATFORM_NAMES = {4: 'NOAA-15', - 2: 'NOAA-16', - 6: 'NOAA-17', - 7: 'NOAA-18', - 8: 'NOAA-19', - 11: 'Metop-B', - 12: 'Metop-A', - 13: 'Metop-C', - 14: 'Metop simulator'}
+AVHRR_PLATFORM_IDS2NAMES = {4: 'NOAA-15', + 2: 'NOAA-16', + 6: 'NOAA-17', + 7: 'NOAA-18', + 8: 'NOAA-19', + 11: 'Metop-B', + 12: 'Metop-A', + 13: 'Metop-C', + 14: 'Metop simulator'}
 def create_xarray(arr):
@@ -63,55 +75,29 @@ def create_xarray(arr):
 return res
-class AVHRRAAPPL1BFile(BaseFileHandler):
- """Reader for AVHRR L1B files created from the AAPP software."""
+class AAPPL1BaseFileHandler(BaseFileHandler):
+ """A base file handler for the AAPP level-1 formats."""
 def __init__(self, filename, filename_info, filetype_info):
- """Initialize object information by reading the input file."""
- super(AVHRRAAPPL1BFile, self).__init__(filename, filename_info, - filetype_info)
- self.channels = {i: None for i in AVHRR_CHANNEL_NAMES}
- self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES}
+ """Initialize AAPP level-1 file handler object."""
+ super().__init__(filename, filename_info, filetype_info)
+
+ self.channels = None
+ self.units = None
+ self.sensor = "unknown"
self._data = None self._header = None - self._is3b = None - self._is3a = None - self._shape = None self.area = None - self.sensor = 'avhrr-3' - self.read() - self.active_channels = self._get_active_channels() - - self.platform_name = PLATFORM_NAMES.get(self._header['satid'][0], None) - - if self.platform_name is None: - raise ValueError("Unsupported platform ID: %d" % self.header['satid']) - - def _get_active_channels(self): - status = self._get_channel_binary_status_from_header() - return self._convert_binary_channel_status_to_activation_dict(status) + self._channel_names = [] + self._angle_names = [] - def _get_channel_binary_status_from_header(self): - status = self._header['inststat1'].item() - change_line = self._header['statchrecnb'] - if change_line > 0: - status |= self._header['inststat2'].item() - return status - - @staticmethod - def _convert_binary_channel_status_to_activation_dict(status): - bits_channels = ((13, '1'), - (12, '2'), - (11, '3a'), - (10, '3b'), - (9, '4'), - (8, '5')) - activated = dict() - for bit, channel_name in bits_channels: - activated[channel_name] = bool(status >> bit & 1) - return activated + def _set_filedata_layout(self): + """Set the file data type/layout.""" + self._header_offset = 0 + self._scan_type = np.dtype([("siteid", " 0: + status |= self._header['inststat2'].item() + return status + + @staticmethod + def _convert_binary_channel_status_to_activation_dict(status): + bits_channels = ((13, '1'), + (12, '2'), + (11, '3a'), + (10, '3b'), + (9, '4'), + (8, '5')) + activated = dict() + for bit, channel_name in bits_channels: + activated[channel_name] = bool(status >> bit & 1) + return activated def available_datasets(self, configured_datasets=None): """Get the available datasets.""" for _, mda in configured_datasets: - if mda['name'] in CHANNEL_NAMES: + if mda['name'] in self._channel_names: yield self.active_channels[mda['name']], mda else: yield True, mda @@ -180,11 +238,10 @@ def get_angles(self, angle_id): """Get sun-satellite viewing angles.""" sunz, satz, azidiff = self._get_all_interpolated_angles() - name_to_variable = dict(zip(ANGLES, (satz, sunz, azidiff))) + name_to_variable = dict(zip(self._angle_names, (satz, sunz, azidiff))) return create_xarray(name_to_variable[angle_id]) - @functools.lru_cache(maxsize=10) - def _get_all_interpolated_angles(self): + def _get_all_interpolated_angles_uncached(self): sunz40km, satz40km, azidiff40km = self._get_tiepoint_angles_in_degrees() return self._interpolate_arrays(sunz40km, satz40km, azidiff40km) @@ -194,10 +251,10 @@ def _get_tiepoint_angles_in_degrees(self): azidiff40km = self._data["ang"][:, :, 2] * 1e-2 return sunz40km, satz40km, azidiff40km - def _interpolate_arrays(self, *input_arrays): + def _interpolate_arrays(self, *input_arrays, geolocation=False): lines = input_arrays[0].shape[0] try: - interpolator = self._create_40km_interpolator(lines, *input_arrays) + interpolator = self._create_40km_interpolator(lines, *input_arrays, geolocation=geolocation) except ImportError: logger.warning("Could not interpolate, python-geotiepoints missing.") output_arrays = input_arrays @@ -208,8 +265,12 @@ def _interpolate_arrays(self, *input_arrays): return output_arrays @staticmethod - def _create_40km_interpolator(lines, *arrays_40km): - from geotiepoints.interpolator import Interpolator + def _create_40km_interpolator(lines, *arrays_40km, geolocation=False): + if geolocation: + # Slower but accurate at datum line + from geotiepoints.geointerpolator import GeoInterpolator as Interpolator + else: + from 
geotiepoints.interpolator import Interpolator cols40km = np.arange(24, 2048, 40) cols1km = np.arange(2048) rows40km = np.arange(lines) @@ -226,15 +287,14 @@ def navigate(self, coordinate_id): lons, lats = self._get_all_interpolated_coordinates() if coordinate_id == 'longitude': return create_xarray(lons) - elif coordinate_id == 'latitude': + if coordinate_id == 'latitude': return create_xarray(lats) - else: - raise KeyError("Coordinate {} unknown.".format(coordinate_id)) - @functools.lru_cache(maxsize=10) - def _get_all_interpolated_coordinates(self): + raise KeyError("Coordinate {} unknown.".format(coordinate_id)) + + def _get_all_interpolated_coordinates_uncached(self): lons40km, lats40km = self._get_coordinates_in_degrees() - return self._interpolate_arrays(lons40km, lats40km) + return self._interpolate_arrays(lons40km, lats40km, geolocation=True) def _get_coordinates_in_degrees(self): lons40km = self._data["pos"][:, :, 1] * 1e-4 @@ -256,10 +316,11 @@ def calibrate(self, if dataset_id['name'] in ("3a", "3b") and self._is3b is None: # Is it 3a or 3b: + line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0] self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'], - chunks=LINE_CHUNK), 3) == 0 + chunks=line_chunks), 3) == 0 self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'], - chunks=LINE_CHUNK), 3) == 1 + chunks=line_chunks), 3) == 1 try: vis_idx = ['1', '2', '3a'].index(dataset_id['name']) @@ -295,8 +356,6 @@ def calibrate(self, return ds -AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5") - # AAPP 1b header _HEADERTYPE = np.dtype([("siteid", "S3"), @@ -489,13 +548,16 @@ def _vis_calibrate(data, if calib_type not in ['counts', 'radiance', 'reflectance']: raise ValueError('Calibration ' + calib_type + ' unknown!') - channel = da.from_array(data["hrpt"][:, :, chn], chunks=(LINE_CHUNK, 2048)) + channel_data = data["hrpt"][:, :, chn] + chunks = get_aapp_chunks(channel_data.shape) + line_chunks = chunks[0] + channel = da.from_array(channel_data, chunks=chunks) mask &= channel != 0 if calib_type == 'counts': return channel - channel = channel.astype(np.float64) + channel = channel.astype(CHANNEL_DTYPE) if calib_type == 'radiance': logger.info("Radiances are not yet supported for " + @@ -513,23 +575,23 @@ def _vis_calibrate(data, coeff_idx = 0 intersection = da.from_array(data["calvis"][:, chn, coeff_idx, 4], - chunks=LINE_CHUNK) + chunks=line_chunks) if calib_coeffs is not None: logger.info("Updating from external calibration coefficients.") - slope1 = da.from_array(calib_coeffs[0], chunks=LINE_CHUNK) - intercept1 = da.from_array(calib_coeffs[1], chunks=LINE_CHUNK) - slope2 = da.from_array(calib_coeffs[2], chunks=LINE_CHUNK) - intercept2 = da.from_array(calib_coeffs[3], chunks=LINE_CHUNK) + slope1 = da.from_array(calib_coeffs[0], chunks=line_chunks) + intercept1 = da.from_array(calib_coeffs[1], chunks=line_chunks) + slope2 = da.from_array(calib_coeffs[2], chunks=line_chunks) + intercept2 = da.from_array(calib_coeffs[3], chunks=line_chunks) else: slope1 = da.from_array(data["calvis"][:, chn, coeff_idx, 0], - chunks=LINE_CHUNK) * 1e-10 + chunks=line_chunks) * 1e-10 intercept1 = da.from_array(data["calvis"][:, chn, coeff_idx, 1], - chunks=LINE_CHUNK) * 1e-7 + chunks=line_chunks) * 1e-7 slope2 = da.from_array(data["calvis"][:, chn, coeff_idx, 2], - chunks=LINE_CHUNK) * 1e-10 + chunks=line_chunks) * 1e-10 intercept2 = da.from_array(data["calvis"][:, chn, coeff_idx, 3], - chunks=LINE_CHUNK) * 1e-7 + chunks=line_chunks) * 1e-7 if chn == 1: # In the level 1b file, 
the visible coefficients are stored as 4-byte integers. Scaling factors then convert @@ -556,18 +618,22 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True):
 *calib_type* in brightness_temperature, radiance, count
 """
- count = da.from_array(data["hrpt"][:, :, irchn + 2], chunks=(LINE_CHUNK, 2048))
+ channel_data = data["hrpt"][:, :, irchn + 2]
+ chunks = get_aapp_chunks(channel_data.shape)
+ line_chunks = chunks[0]
+
+ count = da.from_array(channel_data, chunks=chunks)
 if calib_type == 0:
 return count
 # Mask unnaturally low values
 mask &= count != 0
- count = count.astype(np.float64)
+ count = count.astype(CHANNEL_DTYPE)
- k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=LINE_CHUNK) / 1.0e9
- k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=LINE_CHUNK) / 1.0e6
- k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=LINE_CHUNK) / 1.0e6
+ k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9
+ k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6
+ k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6
 # Count to radiance conversion:
 rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None]
diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py new file mode 100644 index 0000000000..4a2acd25d2 --- /dev/null +++ b/satpy/readers/aapp_mhs_amsub_l1c.py @@ -0,0 +1,462 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2020, 2021 Pytroll developers
+
+# Author(s):
+
+# Adam Dybbroe
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+
+"""Reader for the AAPP AMSU-B/MHS level-1c data.
+
+https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf
+
+"""
+
+import logging
+import numbers
+from contextlib import suppress
+from typing import NamedTuple
+
+import dask.array as da
+import numpy as np
+
+from satpy import CHUNK_SIZE
+from satpy.readers.aapp_l1b import AAPPL1BaseFileHandler, create_xarray
+
+logger = logging.getLogger(__name__)
+
+
+LINE_CHUNK = CHUNK_SIZE ** 2 // 90
+
+MHS_AMSUB_CHANNEL_NAMES = ['1', '2', '3', '4', '5']
+MHS_AMSUB_ANGLE_NAMES = ['sensor_zenith_angle', 'sensor_azimuth_angle', + 'solar_zenith_angle', 'solar_azimuth_difference_angle']
+
+MHS_AMSUB_PLATFORM_IDS2NAMES = {15: 'NOAA-15', + 16: 'NOAA-16', + 17: 'NOAA-17', + 18: 'NOAA-18', + 19: 'NOAA-19', + 1: 'Metop-B', + 2: 'Metop-A', + 3: 'Metop-C', + 4: 'Metop simulator'}
+
+MHS_AMSUB_PLATFORMS = ['Metop-A', 'Metop-B', 'Metop-C', 'NOAA-18', 'NOAA-19']
+
+
+class FrequencyDoubleSideBandBase(NamedTuple):
+ """Base class for a frequency double side band.
+
+ Frequency Double Side Band is supposed to describe the special type of bands
+ commonly used in humidity sounding from Passive Microwave Sensors.
When the
+ absorption band being observed is symmetrical it is advantageous (giving
+ better NeDT) to sense in a band both right and left of the central
+ absorption frequency.
+
+ This is needed because of this bug: https://bugs.python.org/issue41629
+
+ """
+
+ central: float
+ side: float
+ bandwidth: float
+ unit: str = "GHz"
+
+
+class FrequencyDoubleSideBand(FrequencyDoubleSideBandBase):
+ """The frequency double side band class.
+
+ The elements of the double-side-band type frequency band are the central
+ frequency, the relative side band frequency (relative to the center - left
+ and right) and their bandwidths, and optionally a unit (defaults to
+ GHz). No clever unit conversion is done here, it's just used for checking
+ that two ranges are comparable.
+
+ Frequency Double Side Band is supposed to describe the special type of bands
+ commonly used in humidity sounding from Passive Microwave Sensors. When the
+ absorption band being observed is symmetrical it is advantageous (giving
+ better NeDT) to sense in a band both right and left of the central
+ absorption frequency.
+
+ """
+
+ def __eq__(self, other):
+ """Return if two channel frequencies are equal.
+
+ Args:
+ other (tuple or scalar): (central frq, side band frq and band width frq) or scalar frq
+
+ Return:
+ True if other is a scalar and min <= other <= max, or if other is
+ a tuple equal to self, False otherwise.
+
+ """
+ if other is None:
+ return False
+ if isinstance(other, numbers.Number):
+ return other in self
+ if isinstance(other, (tuple, list)) and len(other) == 3:
+ return other in self
+ return super().__eq__(other)
+
+ def __ne__(self, other):
+ """Return the opposite of `__eq__`."""
+ return not self == other
+
+ def __lt__(self, other):
+ """Compare to another frequency."""
+ if other is None:
+ return False
+ return super().__lt__(other)
+
+ def __gt__(self, other):
+ """Compare to another frequency."""
+ if other is None:
+ return True
+ return super().__gt__(other)
+
+ def __hash__(self):
+ """Hash this tuple."""
+ return tuple.__hash__(self)
+
+ def __str__(self):
+ """Format for print out."""
+ return "{0.central} {0.unit} ({0.side}_{0.bandwidth} {0.unit})".format(self)
+
+ def __contains__(self, other):
+ """Check if this double-side-band 'contains' *other*."""
+ if other is None:
+ return False
+ if isinstance(other, numbers.Number):
+ if (self.central + self.side - self.bandwidth/2. <= other
+ <= self.central + self.side + self.bandwidth/2.):
+ return True
+ if (self.central - self.side - self.bandwidth/2. <= other
+ <= self.central - self.side + self.bandwidth/2.):
+ return True
+ return False
+
+ if isinstance(other, (tuple, list)) and len(other) == 3:
+ return ((self.central - self.side - self.bandwidth/2. <=
+ other[0] - other[1] - other[2]/2. and
+ self.central - self.side + self.bandwidth/2. >=
+ other[0] - other[1] + other[2]/2.) or
+ (self.central + self.side - self.bandwidth/2. <=
+ other[0] + other[1] - other[2]/2. and
+ self.central + self.side + self.bandwidth/2. >=
+ other[0] + other[1] + other[2]/2.))
+
+ with suppress(AttributeError):
+ if self.unit != other.unit:
+ raise NotImplementedError("Can't compare frequency ranges with different units.")
+ return ((self.central - self.side - self.bandwidth/2. <=
+ other.central - other.side - other.bandwidth/2. and
+ self.central - self.side + self.bandwidth/2. >=
+ other.central - other.side + other.bandwidth/2.) or
+ (self.central + self.side - self.bandwidth/2. <=
+ other.central + other.side - other.bandwidth/2.
and + self.central + self.side + self.bandwidth/2. >= + other.central + other.side + other.bandwidth/2.)) + + return False + + def distance(self, value): + """Get the distance from value.""" + if self == value: + try: + left_side_dist = abs(value.central - value.side - (self.central - self.side)) + right_side_dist = abs(value.central + value.side - (self.central + self.side)) + return min(left_side_dist, right_side_dist) + except AttributeError: + if isinstance(value, (tuple, list)): + return abs((value[0] - value[1]) - (self.central - self.side)) + + left_side_dist = abs(value - (self.central - self.side)) + right_side_dist = abs(value - (self.central + self.side)) + return min(left_side_dist, right_side_dist) + else: + return np.inf + + @classmethod + def convert(cls, frq): + """Convert `frq` to this type if possible.""" + if isinstance(frq, dict): + return cls(**frq) + return frq + + +class FrequencyRangeBase(NamedTuple): + """Base class for frequency ranges. + + This is needed because of this bug: https://bugs.python.org/issue41629 + """ + + central: float + bandwidth: float + unit: str = "GHz" + + +class FrequencyRange(FrequencyRangeBase): + """The Frequency range class. + + The elements of the range are central and bandwidth values, and optionally + a unit (defaults to GHz). No clever unit conversion is done here, it's just + used for checking that two ranges are comparable. + + This type is used for passive microwave sensors. + + """ + + def __eq__(self, other): + """Return if two channel frequencies are equal. + + Args: + other (tuple or scalar): (central frq, band width frq) or scalar frq + + Return: + True if other is a scalar and min <= other <= max, or if other is + a tuple equal to self, False otherwise. + + """ + if other is None: + return False + if isinstance(other, numbers.Number): + return other in self + if isinstance(other, (tuple, list)) and len(other) == 2: + return self[:2] == other + return super().__eq__(other) + + def __ne__(self, other): + """Return the opposite of `__eq__`.""" + return not self == other + + def __lt__(self, other): + """Compare to another frequency.""" + if other is None: + return False + return super().__lt__(other) + + def __gt__(self, other): + """Compare to another frequency.""" + if other is None: + return True + return super().__gt__(other) + + def __hash__(self): + """Hash this tuple.""" + return tuple.__hash__(self) + + def __str__(self): + """Format for print out.""" + return "{0.central} {0.unit} ({0.bandwidth} {0.unit})".format(self) + + def __contains__(self, other): + """Check if this range contains *other*.""" + if other is None: + return False + if isinstance(other, numbers.Number): + return self.central - self.bandwidth/2. <= other <= self.central + self.bandwidth/2. + + with suppress(AttributeError): + if self.unit != other.unit: + raise NotImplementedError("Can't compare frequency ranges with different units.") + return (self.central - self.bandwidth/2. <= other.central - other.bandwidth/2. and + self.central + self.bandwidth/2. >= other.central + other.bandwidth/2.) 
+ return False + + def distance(self, value): + """Get the distance from value.""" + if self == value: + try: + return abs(value.central - self.central) + except AttributeError: + if isinstance(value, (tuple, list)): + return abs(value[0] - self.central) + return abs(value - self.central) + else: + return np.inf + + @classmethod + def convert(cls, frq): + """Convert `frq` to this type if possible.""" + if isinstance(frq, dict): + return cls(**frq) + return frq + + +class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler): + """Reader for AMSU-B/MHS L1C files created from the AAPP software.""" + + def __init__(self, filename, filename_info, filetype_info): + """Initialize object information by reading the input file.""" + super(MHS_AMSUB_AAPPL1CFile, self).__init__(filename, filename_info, + filetype_info) + + self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES} + self.units = {i: 'brightness_temperature' for i in MHS_AMSUB_CHANNEL_NAMES} + + self._channel_names = MHS_AMSUB_CHANNEL_NAMES + self._angle_names = MHS_AMSUB_ANGLE_NAMES + + self._set_filedata_layout() + self.read() + + self._get_platform_name(MHS_AMSUB_PLATFORM_IDS2NAMES) + self._get_sensorname() + + def _set_filedata_layout(self): + """Set the file data type/layout.""" + self._header_offset = HEADER_LENGTH + self._scan_type = _SCANTYPE + self._header_type = _HEADERTYPE + + def _get_sensorname(self): + """Get the sensor name from the header.""" + if self._header['instrument'][0] == 11: + self.sensor = 'amsub' + elif self._header['instrument'][0] == 12: + self.sensor = 'mhs' + else: + raise IOError("Sensor neither MHS nor AMSU-B!") + + def get_angles(self, angle_id): + """Get sun-satellite viewing angles.""" + satz = self._data["angles"][:, :, 0] * 1e-2 + sata = self._data["angles"][:, :, 1] * 1e-2 + + sunz = self._data["angles"][:, :, 2] * 1e-2 + suna = self._data["angles"][:, :, 3] * 1e-2 + + name_to_variable = dict(zip(MHS_AMSUB_ANGLE_NAMES, (satz, sata, sunz, suna))) + return create_xarray(name_to_variable[angle_id]) + + def navigate(self, coordinate_id): + """Get the longitudes and latitudes of the scene.""" + lons, lats = self._get_coordinates_in_degrees() + if coordinate_id == 'longitude': + return create_xarray(lons) + if coordinate_id == 'latitude': + return create_xarray(lats) + + raise KeyError("Coordinate {} unknown.".format(coordinate_id)) + + def _get_coordinates_in_degrees(self): + lons = self._data["latlon"][:, :, 1] * 1e-4 + lats = self._data["latlon"][:, :, 0] * 1e-4 + return lons, lats + + def _calibrate_active_channel_data(self, key): + """Calibrate active channel data only.""" + return self.calibrate(key) + + def calibrate(self, dataset_id): + """Calibrate the data.""" + units = {'brightness_temperature': 'K'} + + mask = True + idx = ['1', '2', '3', '4', '5'].index(dataset_id['name']) + + ds = create_xarray( + _calibrate(self._data, idx, + dataset_id['calibration'], + mask=mask)) + + ds.attrs['units'] = units[dataset_id['calibration']] + ds.attrs.update(dataset_id._asdict()) + return ds + + +def _calibrate(data, + chn, + calib_type, + mask=True): + """Calibrate channel data. + + *calib_type* in brightness_temperature. 
+
+ """
+ if calib_type not in ['brightness_temperature']:
+ raise ValueError('Calibration ' + calib_type + ' unknown!')
+
+ channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90))
+ mask &= channel != 0
+
+ if calib_type == 'counts':
+ return channel
+
+ channel = channel.astype(np.float64)
+
+ return da.where(mask, channel, np.nan)
+
+
+HEADER_LENGTH = 1152*4
+
+_HEADERTYPE = np.dtype([("siteid", "S3"),
+ ("cfill_1", "S1"),
+ ("l1bsite", "S3"),
+ ("cfill_2", "S1"),
+ ("versnb", " 23:
+ return False
+ return True
+
+ def _modify_observation_time_for_nominal(self, observation_time):
+ """Round observation time to a nominal time based on known observation frequency.
+
+ AHI observations are split into different sectors including Full Disk
+ (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each
+ sector is observed at different frequencies (e.g. every 10 minutes,
+ every 2.5 minutes, and every 30 seconds). This method will take the
+ actual observation time and round it to the nearest interval for this
+ sector. So if the observation time is 13:32:48 for the "JP02" sector
+ which is the second Japan observation where every Japan observation is
+ 2.5 minutes apart, then the result should be 13:32:30.
+
+ """ timeline = "{:04d}".format(self.basic_info['observation_timeline'][0])
+ if not self._is_valid_timeline(timeline):
+ warnings.warn("Observation timeline is fill value, not rounding observation time.")
+ return observation_time
+ if self.observation_area == 'FLDK':
 dt = 0
 else:
- observation_freq = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]]
- dt = observation_freq * (int(self.observation_area[2:]) - 1)
- return self.start_time.replace(hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60, - second=dt % 60, microsecond=0)
+ observation_frequency_seconds = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]]
+ dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1)
+
+ return observation_time.replace( + hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60, + second=dt % 60, microsecond=0)
 def get_dataset(self, key, info):
 """Get the dataset."""
@@ -425,7 +512,7 @@ def _get_area_def(self):
 def _check_fpos(self, fp_, fpos, offset, block):
 """Check file position matches blocksize."""
 if fp_.tell() + offset != fpos:
- warnings.warn("Actual "+block+" header size does not match expected")
+ warnings.warn(f"Actual {block} header size does not match expected")
 return
 def _read_header(self, fp_):
@@ -480,31 +567,22 @@ def _read_header(self, fp_):
 fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1)
 # 8 The navigation corrections:
 ncorrs = header["block8"]['numof_correction_info_data'][0]
- dtype = np.dtype([
- ("line_number_after_rotation", " 0:
+ coord_radian.attrs['scale_factor'] *= -1
+
+ # TODO remove this check when old versions of IDPF test data ( 1.1:
+ logger.info('The variable state/celestial/earth_sun_distance contains unexpected values '
+ '(mean value is {} AU). Defaulting to 1 AU for reflectance calculation.'
+ ''.format(sun_earth_distance)) + sun_earth_distance = 1 + res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi - res.attrs["units"] = "%" return res diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 9691d99d82..867317df72 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -20,19 +20,19 @@ import logging from contextlib import suppress -from datetime import datetime, timedelta import numpy as np import xarray as xr +from pyresample import geometry from satpy import CHUNK_SIZE -from satpy.readers._geos_area import get_area_definition, make_ext +from satpy.readers._geos_area import get_geos_area_naming, make_ext +from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler +from satpy.resample import get_area_def logger = logging.getLogger(__name__) -PRODUCT_DATA_DURATION_MINUTES = 20 - SSP_DEFAULT = 0.0 @@ -40,44 +40,24 @@ class FciL2CommonFunctions(object): """Shared operations for file handlers.""" @property - def _start_time(self): - try: - start_time = datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y%m%d%H%M%S') - except (ValueError, KeyError): - # TODO if the sensing_start_time_utc attribute is not valid, uses a hardcoded value - logger.warning("Start time cannot be obtained from file content, using default value instead") - start_time = datetime.strptime('20200101120000', '%Y%m%d%H%M%S') - return start_time + def spacecraft_name(self): + """Return spacecraft name.""" + return self.nc.attrs['platform'] @property - def _end_time(self): - """Get observation end time.""" - try: - end_time = datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y%m%d%H%M%S') - except (ValueError, KeyError): - # TODO if the sensing_end_time_utc attribute is not valid, adds 20 minutes to the start time - end_time = self._start_time + timedelta(minutes=PRODUCT_DATA_DURATION_MINUTES) - return end_time + def sensor_name(self): + """Return instrument name.""" + return self.nc.attrs['data_source'] @property - def _spacecraft_name(self): - """Return spacecraft name.""" - try: - return self.nc.attrs['platform'] - except KeyError: - # TODO if the platform attribute is not valid, return a default value - logger.warning("Spacecraft name cannot be obtained from file content, using default value instead") - return 'DEFAULT_MTG' - - @property - def _sensor_name(self): - """Return instrument.""" + def ssp_lon(self): + """Return longitude at subsatellite point.""" try: - return self.nc.attrs['data_source'] - except KeyError: - # TODO if the data_source attribute is not valid, return a default value - logger.warning("Sensor cannot be obtained from file content, using default value instead") - return 'fci' + return float(self.nc['mtg_geos_projection'].attrs['longitude_of_projection_origin']) + except (KeyError, AttributeError): + logger.warning(f"ssp_lon could not be obtained from file content, using default value " + f"of {SSP_DEFAULT} degrees east instead") + return SSP_DEFAULT def _get_global_attributes(self): """Create a dictionary of global attributes to be added to all datasets. @@ -85,41 +65,76 @@ def _get_global_attributes(self): Returns: dict: A dictionary of global attributes. 
filename: name of the product file - start_time: sensing start time from best available source - end_time: sensing end time from best available source spacecraft_name: name of the spacecraft ssp_lon: longitude of subsatellite point sensor: name of sensor - creation_time: creation time of the product platform_name: name of the platform """ attributes = { 'filename': self.filename, - 'start_time': self._start_time, - 'end_time': self._end_time, - 'spacecraft_name': self._spacecraft_name, + 'spacecraft_name': self.spacecraft_name, 'ssp_lon': self.ssp_lon, - 'sensor': self._sensor_name, - 'creation_time': self.filename_info['creation_time'], - 'platform_name': self._spacecraft_name, + 'sensor': self.sensor_name, + 'platform_name': self.spacecraft_name, } return attributes + def _set_attributes(self, variable, dataset_info, segmented=False): + """Set dataset attributes.""" + if segmented: + xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows" + else: + xdim, ydim = "number_of_columns", "number_of_rows" + + if dataset_info['file_key'] not in ['product_quality', 'product_completeness', 'product_timeliness']: + variable = variable.rename({ydim: 'y', xdim: 'x'}) + + variable.attrs.setdefault('units', None) + variable.attrs.update(dataset_info) + variable.attrs.update(self._get_global_attributes()) + + return variable + + def _slice_dataset(self, variable, dataset_info, dimensions): + """Slice data if dimension layers have been provided in yaml-file.""" + slice_dict = {dim: dataset_info[dim_id] for (dim, dim_id) in dimensions.items() + if dim_id in dataset_info.keys() and dim in variable.dims} + for dim, dim_ind in slice_dict.items(): + logger.debug(f"Extracting {dimensions[dim]}-index {dim_ind} from dimension '{dim}'.") + variable = variable.sel(slice_dict) + + return variable + + @staticmethod + def _mask_data(variable, fill_value): + """Set fill_values, as defined in yaml-file, to NaN. + + Set data points in variable to NaN if they are equal to fill_value + or any of the values in fill_value if fill_value is a list. 
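+        A hedged, illustrative example of this masking (made-up values; it mirrors
+        the chained ``where`` calls in the loop below)::
+
+            import numpy as np
+            import xarray as xr
+
+            variable = xr.DataArray(np.array([[1, 255], [-1, 4]]))
+            for val in [255, -1]:
+                variable = variable.where(variable != val).astype('float32')
+            # variable is now [[1., nan], [nan, 4.]]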
+ """
+ if not isinstance(fill_value, list):
+ fill_value = [fill_value]
+
+ for val in fill_value:
+ variable = variable.where(variable != val).astype('float32')
+
+ return variable
 def __del__(self):
 """Close the NetCDF file that may still be open."""
 with suppress(OSError):
 self.nc.close()
-class FciL2NCFileHandler(BaseFileHandler, FciL2CommonFunctions):
+class FciL2NCFileHandler(FciL2CommonFunctions, BaseFileHandler):
 """Reader class for FCI L2 products in NetCDF4 format."""
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, with_area_definition=True):
 """Open the NetCDF file with xarray and prepare for dataset reading."""
 super().__init__(filename, filename_info, filetype_info)
- # Use xarray's default netcdf4 engine to open the file
+ # Use xarray's default netcdf4 engine to open the file
 self.nc = xr.open_dataset( self.filename, decode_cf=True, @@ -130,26 +145,26 @@ def __init__(self, filename, filename_info, filetype_info):
 }
 )
+ if with_area_definition is False:
+ logger.info("Setting `with_area_definition=False` has no effect on pixel-based products.")
+
 # Read metadata which are common to all datasets
 self.nlines = self.nc['y'].size
 self.ncols = self.nc['x'].size
 self._projection = self.nc['mtg_geos_projection']
+ self.multi_dims = {'maximum_number_of_layers': 'layer', 'number_of_vis_channels': 'vis_channel_id'}
- # Compute the area definition
- self._area_def = self._compute_area_def()
-
- @property
- def ssp_lon(self):
- """Return subsatellite point longitude."""
+ def get_area_def(self, key):
+ """Return the area definition."""
 try:
- return float(self._projection.attrs['longitude_of_projection_origin'])
- except KeyError:
- logger.warning("ssp_lon cannot be obtained from file content, using default value instead")
- return SSP_DEFAULT
+ return self._area_def
+ except AttributeError:
+ raise NotImplementedError
 def get_dataset(self, dataset_id, dataset_info):
 """Get dataset using the file_key in dataset_info."""
 var_key = dataset_info['file_key']
+ par_name = dataset_info['name']
 logger.debug('Reading in file to get dataset with key %s.', var_key)
 try:
@@ -158,109 +173,127 @@ def get_dataset(self, dataset_id, dataset_info):
 logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key)
 return None
- # TODO in some of the test files, invalid pixels contain the value defined as "fill_value" in the YAML file
- # instead of being masked directly in the netCDF variable.
- # therefore NaN is applied where such value is found or (0 if the array contains integer values) - # the next 11 lines have to be removed once the product files are correctly configured - try: - mask_value = dataset_info['mask_value'] - except KeyError: - mask_value = np.NaN - try: - fill_value = dataset_info['fill_value'] - except KeyError: - fill_value = np.NaN + # Compute the area definition + if var_key not in ['product_quality', 'product_completeness', 'product_timeliness']: + self._area_def = self._compute_area_def(dataset_id) - if dataset_info['file_type'] == 'nc_fci_test_clm': - data_values = variable.where(variable != fill_value, mask_value).astype('uint32', copy=False) - else: - data_values = variable.where(variable != fill_value, mask_value).astype('float32', copy=False) + if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()): + variable = self._slice_dataset(variable, dataset_info, self.multi_dims) - data_values.attrs = variable.attrs - variable = data_values + if par_name == 'retrieved_cloud_optical_thickness': + variable = self.get_total_cot(variable) - # If the variable has 3 dimensions, select the required layer - if variable.ndim == 3: - layer = dataset_info.get('layer', 0) - logger.debug('Selecting the layer %d.', layer) - variable = variable.sel(maximum_number_of_layers=layer) + if dataset_info['file_type'] == 'nc_fci_test_clm': + variable = self._decode_clm_test_data(variable, dataset_info) - if dataset_info['file_type'] == 'nc_fci_test_clm' and var_key != 'cloud_mask_cmrt6_test_result': - variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31) + if 'fill_value' in dataset_info: + variable = self._mask_data(variable, dataset_info['fill_value']) - # Rename the dimensions as required by Satpy - variable = variable.rename({"number_of_rows": 'y', "number_of_columns": 'x'}) + variable = self._set_attributes(variable, dataset_info) - # Manage the attributes of the dataset - variable.attrs.setdefault('units', None) + return variable - variable.attrs.update(dataset_info) - variable.attrs.update(self._get_global_attributes()) + @staticmethod + def _decode_clm_test_data(variable, dataset_info): + if dataset_info['file_key'] != 'cloud_mask_cmrt6_test_result': + variable = variable.astype('uint32') + variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31).astype('int8') return variable - def get_area_def(self, key): - """Return the area definition (common to all data in product).""" - return self._area_def - - def _compute_area_def(self): + def _compute_area_def(self, dataset_id): """Compute the area definition. Returns: AreaDefinition: A pyresample AreaDefinition object containing the area definition. 
""" - # Read the projection data from the mtg_geos_projection variable - a = float(self._projection.attrs['semi_major_axis']) - b = float(self._projection.attrs['semi_minor_axis']) - h = float(self._projection.attrs['perspective_point_height']) + area_extent = self._get_area_extent() + area_naming, proj_dict = self._get_proj_area(dataset_id) + area_def = geometry.AreaDefinition( + area_naming['area_id'], + area_naming['description'], + "", + proj_dict, + self.ncols, + self.nlines, + area_extent) - # TODO sweep_angle_axis value not handled at the moment, therefore commented out - # sweep_axis = self._projection.attrs['sweep_angle_axis'] + return area_def - # Coordinates of the pixel in radians + def _get_area_extent(self): + """Calculate area extent of dataset.""" + # Load and convert x/y coordinates to degrees as required by the make_ext function x = self.nc['x'] y = self.nc['y'] - # TODO conversion to radians: offset and scale factor are missing from some test NetCDF file - # TODO the next two lines should be removed when the offset and scale factor are correctly configured - if not hasattr(x, 'standard_name'): - x = np.radians(x * 0.003202134 - 8.914740401) - y = np.radians(y * 0.003202134 - 8.914740401) - - # Convert to degrees as required by the make_ext function x_deg = np.degrees(x) y_deg = np.degrees(y) - # Select the extreme points of the extension area - x_l, x_r = x_deg.values[0], x_deg.values[-1] - y_l, y_u = y_deg.values[0], y_deg.values[-1] + # Select the extreme points and calcualte area extent (not: these refer to pixel center) + ll_x, ur_x = -x_deg.values[0], -x_deg.values[-1] + ll_y, ur_y = y_deg.values[-1], y_deg.values[0] + h = float(self._projection.attrs['perspective_point_height']) + area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h) - # Compute the extension area in meters - area_extent = make_ext(x_l, x_r, y_l, y_u, h) + # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners + scale_factor = (x[1:]-x[0:-1]).values.mean() + res = abs(scale_factor) * h + area_extent = tuple(i + res/2 if i > 0 else i - res/2 for i in area_extent_pixel_center) - # Assemble the projection definition dictionary - p_dict = { - 'nlines': self.nlines, - 'ncols': self.ncols, - 'ssp_lon': self.ssp_lon, - 'a': a, - 'b': b, - 'h': h, - 'a_name': 'FCI Area', # TODO to be confirmed - 'a_desc': 'Area for FCI instrument', # TODO to be confirmed - 'p_id': 'geos' - } + return area_extent - # Compute the area definition - area_def = get_area_definition(p_dict, area_extent) + def _get_proj_area(self, dataset_id): + """Extract projection and area information.""" + # Read the projection data from the mtg_geos_projection variable + a = float(self._projection.attrs['semi_major_axis']) + h = float(self._projection.attrs['perspective_point_height']) - return area_def + # Some L2PF test data files have a typo in the keyname for the inverse flattening parameter. Use a default value + # as fallback until all L2PF test files are correctly formatted. 
+ rf = float(self._projection.attrs.get('inverse_flattening', 298.257223563)) + + res = dataset_id.resolution + + area_naming_input_dict = {'platform_name': 'mtg', + 'instrument_name': 'fci', + 'resolution': res, + } + + area_naming = get_geos_area_naming({**area_naming_input_dict, + **get_service_mode('fci', self.ssp_lon)}) + + proj_dict = {'a': a, + 'lon_0': self.ssp_lon, + 'h': h, + "rf": rf, + 'proj': 'geos', + 'units': 'm', + "sweep": 'y'} + + return area_naming, proj_dict + + @staticmethod + def get_total_cot(variable): + """Sum the cloud optical thickness from the two OCA layers. + + The optical thickness has to be transformed to linear space before adding the values from the two layers. The + combined/total optical thickness is then transformed back to logarithmic space. + """ + attrs = variable.attrs + variable = 10 ** variable + variable = variable.fillna(0.) + variable = variable.sum(dim='maximum_number_of_layers', keep_attrs=True) + variable = variable.where(variable != 0., np.nan) + variable = np.log10(variable) + variable.attrs = attrs + + return variable -class FciL2NCSegmentFileHandler(BaseFileHandler, FciL2CommonFunctions): +class FciL2NCSegmentFileHandler(FciL2CommonFunctions, BaseFileHandler): """Reader class for FCI L2 Segmented products in NetCDF4 format.""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, with_area_definition=False): """Open the NetCDF file with xarray and prepare for dataset reading.""" super().__init__(filename, filename_info, filetype_info) # Use xarray's default netcdf4 engine to open the file @@ -277,8 +310,19 @@ def __init__(self, filename, filename_info, filetype_info): # Read metadata which are common to all datasets self.nlines = self.nc['number_of_FoR_rows'].size self.ncols = self.nc['number_of_FoR_cols'].size + self.with_adef = with_area_definition + self.multi_dims = { + 'number_of_categories': 'category_id', 'number_of_channels': 'channel_id', + 'number_of_vis_channels': 'vis_channel_id', 'number_of_ir_channels': 'ir_channel_id', + 'number_test': 'test_id', + } - self.ssp_lon = SSP_DEFAULT + def get_area_def(self, key): + """Return the area definition.""" + try: + return self._area_def + except AttributeError: + raise NotImplementedError def get_dataset(self, dataset_id, dataset_info): """Get dataset using the file_key in dataset_info.""" @@ -291,24 +335,70 @@ def get_dataset(self, dataset_id, dataset_info): logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key) return None - # TODO in some of the test files, invalid pixels contain the value defined as "fill_value" in the YAML file - # instead of being masked directly in the netCDF variable. 
- # therefore NaN is applied where such value is found or (0 if the array contains integer values)
- # the next 11 lines have to be removed once the product files are correctly configured
+ if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()):
+ variable = self._slice_dataset(variable, dataset_info, self.multi_dims)
- mask_value = dataset_info.get('mask_value', np.NaN)
- fill_value = dataset_info.get('fill_value', np.NaN)
+ if self.with_adef and var_key not in ['longitude', 'latitude', + 'product_quality', 'product_completeness', 'product_timeliness']:
+ self._area_def = self._construct_area_def(dataset_id)
- float_variable = variable.where(variable != fill_value, mask_value).astype('float32', copy=False)
- float_variable.attrs = variable.attrs
- variable = float_variable
+ # coordinates are not relevant when returning data with an AreaDefinition
+ if 'coordinates' in dataset_info.keys():
+ del dataset_info['coordinates']
- # Rename the dimensions as required by Satpy
- variable = variable.rename({"number_of_FoR_rows": 'y', "number_of_FoR_cols": 'x'})
-# # Manage the attributes of the dataset
- variable.attrs.setdefault('units', None)
+ if 'fill_value' in dataset_info:
+ variable = self._mask_data(variable, dataset_info['fill_value'])
- variable.attrs.update(dataset_info)
- variable.attrs.update(self._get_global_attributes())
+ variable = self._set_attributes(variable, dataset_info, segmented=True)
 return variable
+
+ def _construct_area_def(self, dataset_id):
+ """Construct the area definition.
+
+ Returns:
+ AreaDefinition: A pyresample AreaDefinition object containing the area definition.
+
+ """
+ res = dataset_id.resolution
+
+ area_naming_input_dict = {'platform_name': 'mtg', + 'instrument_name': 'fci', + 'resolution': res, + }
+
+ area_naming = get_geos_area_naming({**area_naming_input_dict, + **get_service_mode('fci', self.ssp_lon)})
+
+ # Construct area definition from standardized area definition.
+ stand_area_def = get_area_def(area_naming['area_id'])
+
+ if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines):
+ raise NotImplementedError('Unrecognised AreaDefinition.')
+
+ mod_area_extent = self._modify_area_extent(stand_area_def.area_extent)
+
+ area_def = geometry.AreaDefinition( + stand_area_def.area_id, + stand_area_def.description, + "", + stand_area_def.proj_dict, + stand_area_def.x_size, + stand_area_def.y_size, + mod_area_extent)
+
+ return area_def
+
+ @staticmethod
+ def _modify_area_extent(stand_area_extent):
+ """Modify area extent to match satellite projection.
+
+ Area extent has to be modified since the L2 products are stored with the south-east
+ in the upper-right corner (as opposed to north-east in the standardized area definitions).
+ """
+ ll_x, ll_y, ur_x, ur_y = stand_area_extent
+ ll_y *= -1.
+ ur_y *= -1.
+ area_extent = tuple([ll_x, ll_y, ur_x, ur_y])
+
+ return area_extent
diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 56b7a16675..338eeba6fd 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -106,10 +106,8 @@ def combine_info(self, all_infos):
 - end_time
 - start_orbit
 - end_orbit
- - satellite_altitude
- - satellite_latitude
- - satellite_longitude
 - orbital_parameters
+ - time_parameters
 Also, concatenate the areas.
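To make the combination rules above concrete, here is a minimal, self-contained sketch with made-up handler metadata; the ``combine`` helper is a hypothetical stand-in for the ``_combine`` method used in the hunk below, reducing each key across all info dicts::

    from datetime import datetime

    def combine(infos, func, *keys):
        # Reduce each key across all info dictionaries with func (min, max, ...).
        return {key: func([info[key] for info in infos]) for key in keys}

    all_infos = [
        {'start_time': datetime(2022, 5, 1, 12, 0), 'end_time': datetime(2022, 5, 1, 12, 10)},
        {'start_time': datetime(2022, 5, 1, 12, 10), 'end_time': datetime(2022, 5, 1, 12, 20)},
    ]
    new_dict = combine(all_infos, min, 'start_time')
    new_dict.update(combine(all_infos, max, 'end_time'))
    # new_dict now spans the combined 12:00-12:20 interval of both handlers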
@@ -118,26 +116,8 @@ def combine_info(self, all_infos): new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit') new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit')) - new_dict.update(self._combine(all_infos, np.mean, - 'satellite_longitude', - 'satellite_latitude', - 'satellite_altitude')) - - # Average orbital parameters - orb_params = [info.get('orbital_parameters', {}) for info in all_infos] - if all(orb_params): - # Collect all available keys - orb_params_comb = {} - for d in orb_params: - orb_params_comb.update(d) - - # Average known keys - keys = ['projection_longitude', 'projection_latitude', 'projection_altitude', - 'satellite_nominal_longitude', 'satellite_nominal_latitude', - 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude', - 'nadir_longitude', 'nadir_latitude'] - orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) - new_dict['orbital_parameters'] = orb_params_comb + new_dict.update(self._combine_orbital_parameters(all_infos)) + new_dict.update(self._combine_time_parameters(all_infos)) try: area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]), @@ -150,6 +130,44 @@ def combine_info(self, all_infos): new_dict.update(combined_info) return new_dict + def _combine_orbital_parameters(self, all_infos): + orb_params = [info.get('orbital_parameters', {}) for info in all_infos] + if not all(orb_params): + return {} + # Collect all available keys + orb_params_comb = {} + for d in orb_params: + orb_params_comb.update(d) + + # Average known keys + keys = ['projection_longitude', 'projection_latitude', 'projection_altitude', + 'satellite_nominal_longitude', 'satellite_nominal_latitude', + 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude', + 'nadir_longitude', 'nadir_latitude'] + orb_params_comb.update(self._combine(orb_params, np.mean, *keys)) + return {'orbital_parameters': orb_params_comb} + + def _combine_time_parameters(self, all_infos): + time_params = [info.get('time_parameters', {}) for info in all_infos] + if not all(time_params): + return {} + # Collect all available keys + time_params_comb = {} + for d in time_params: + time_params_comb.update(d) + + start_keys = ( + 'nominal_start_time', + 'observation_start_time', + ) + end_keys = ( + 'nominal_end_time', + 'observation_end_time', + ) + time_params_comb.update(self._combine(time_params, min, *start_keys)) + time_params_comb.update(self._combine(time_params, max, *end_keys)) + return {'time_parameters': time_params_comb} + @property def start_time(self): """Get start time.""" diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py new file mode 100644 index 0000000000..dde3ce7a71 --- /dev/null +++ b/satpy/readers/ghrsst_l2.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright (c) 2017 - 2022 Satpy developers +# +# This file is part of Satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
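+# A hedged usage sketch (the filename and dataset name are illustrative, and it
+# is assumed the accompanying YAML configuration registers this handler under
+# the reader name ``ghrsst_l2``):
+#
+#     from satpy import Scene
+#
+#     scn = Scene(filenames=['20220501120000-GHRSST-SSTskin-example.nc'],
+#                 reader='ghrsst_l2')
+#     scn.load(['sea_surface_temperature'])
+#     print(scn['sea_surface_temperature'].attrs['sensor'])
+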
+"""Reader for the GHRSST level-2 formatted data.""" + +import os +import tarfile +from contextlib import suppress +from datetime import datetime +from functools import cached_property + +import xarray as xr + +from satpy import CHUNK_SIZE +from satpy.readers.file_handlers import BaseFileHandler + + +class GHRSSTL2FileHandler(BaseFileHandler): + """File handler for GHRSST L2 netCDF files.""" + + def __init__(self, filename, filename_info, filetype_info, engine=None): + """Initialize the file handler for GHRSST L2 netCDF data.""" + super().__init__(filename, filename_info, filetype_info) + self._engine = engine + self._tarfile = None + + self.filename_info['start_time'] = datetime.strptime( + self.nc.start_time, '%Y%m%dT%H%M%SZ') + self.filename_info['end_time'] = datetime.strptime( + self.nc.stop_time, '%Y%m%dT%H%M%SZ') + + @cached_property + def nc(self): + """Get the xarray Dataset for the filename.""" + if os.fspath(self.filename).endswith('tar'): + file_obj = self._open_tarfile() + else: + file_obj = self.filename + + nc = xr.open_dataset(file_obj, + decode_cf=True, + mask_and_scale=True, + engine=self._engine, + chunks={'ni': CHUNK_SIZE, + 'nj': CHUNK_SIZE}) + + return nc.rename({'ni': 'x', 'nj': 'y'}) + + def _open_tarfile(self): + self._tarfile = tarfile.open(name=self.filename, mode='r') + sst_filename = next((name for name in self._tarfile.getnames() + if self._is_sst_file(name))) + file_obj = self._tarfile.extractfile(sst_filename) + return file_obj + + @staticmethod + def _is_sst_file(name): + """Check if file in the tar archive is a valid SST file.""" + return name.endswith('nc') and 'GHRSST-SSTskin' in name + + def get_dataset(self, key, info): + """Get any available dataset.""" + stdname = info.get('standard_name') + return self.nc[stdname].squeeze() + + @property + def start_time(self): + """Get start time.""" + return self.filename_info['start_time'] + + @property + def end_time(self): + """Get end time.""" + return self.filename_info['end_time'] + + @property + def sensor(self): + """Get the sensor name.""" + return self.nc.attrs['sensor'].lower() + + def __del__(self): + """Close the tarfile object.""" + with suppress(AttributeError): + self._tarfile.close() diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py index 2fa31c5dc6..bfb2719b07 100644 --- a/satpy/readers/glm_l2.py +++ b/satpy/readers/glm_l2.py @@ -35,6 +35,7 @@ PLATFORM_NAMES = { 'G16': 'GOES-16', 'G17': 'GOES-17', + 'G18': 'GOES-18', } # class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py index 5d139f9f09..05bcc513d7 100644 --- a/satpy/readers/goes_imager_hrit.py +++ b/satpy/readers/goes_imager_hrit.py @@ -30,8 +30,8 @@ import dask.array as da import numpy as np import xarray as xr -from pyresample import geometry +from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming from satpy.readers.eum_base import recarray2dict, time_cds_short from satpy.readers.hrit_base import ( HRITFileHandler, @@ -352,6 +352,8 @@ def process_prologue(self): 14: "GOES-14", 15: "GOES-15"} +SENSOR_NAME = 'goes_imager' + class HRITGOESFileHandler(HRITFileHandler): """GOES HRIT format reader.""" @@ -385,7 +387,7 @@ def get_dataset(self, key, info): new_attrs.update(res.attrs) res.attrs = new_attrs res.attrs['platform_name'] = self.platform_name - res.attrs['sensor'] = 'goes_imager' + res.attrs['sensor'] = SENSOR_NAME res.attrs['orbital_parameters'] = {'projection_longitude': 
self.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE} @@ -442,45 +444,41 @@ def _calibrate(self, data): res.attrs['units'] = units.get(unit, unit) return res - def get_area_def(self, dsid): + def get_area_def(self, dataset_id): """Get the area definition of the band.""" - cfac = np.int32(self.mda['cfac']) - lfac = np.int32(self.mda['lfac']) - coff = np.float32(self.mda['coff']) - loff = np.float32(self.mda['loff']) - - a = EQUATOR_RADIUS - b = POLE_RADIUS - h = ALTITUDE - - lon_0 = self.prologue['SubSatLongitude'] - - nlines = int(self.mda['number_of_lines']) - ncols = int(self.mda['number_of_columns']) - - loff = nlines - loff - - area_extent = self.get_area_extent((nlines, ncols), - (loff, coff), - (lfac, cfac), - h) - - proj_dict = {'a': float(a), - 'b': float(b), - 'lon_0': float(lon_0), - 'h': float(h), - 'proj': 'geos', - 'units': 'm'} - - area = geometry.AreaDefinition( - 'some_area_name', - "On-the-fly area", - 'geosmsg', - proj_dict, - ncols, - nlines, - area_extent) - + proj_dict = self._get_proj_dict(dataset_id) + area_extent = get_area_extent(proj_dict) + area = get_area_definition(proj_dict, area_extent) self.area = area - return area + + def _get_proj_dict(self, dataset_id): + loff = np.float32(self.mda['loff']) + nlines = np.int32(self.mda['number_of_lines']) + loff = nlines - loff + name_dict = get_geos_area_naming({ + 'platform_name': self.platform_name, + 'instrument_name': SENSOR_NAME, + # Partial scans are padded to full disk + 'service_name': 'FD', + 'service_desc': 'Full Disk', + 'resolution': dataset_id['resolution'] + }) + return { + 'a': EQUATOR_RADIUS, + 'b': POLE_RADIUS, + 'ssp_lon': float(self.prologue['SubSatLongitude']), + 'h': ALTITUDE, + 'proj': 'geos', + 'units': 'm', + 'a_name': name_dict['area_id'], + 'a_desc': name_dict['description'], + 'p_id': '', + 'nlines': nlines, + 'ncols': np.int32(self.mda['number_of_columns']), + 'cfac': np.int32(self.mda['cfac']), + 'lfac': np.int32(self.mda['lfac']), + 'coff': np.float32(self.mda['coff']), + 'loff': loff, + 'scandir': 'N2S' + } diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py index 2a13319bdf..15b43476d5 100644 --- a/satpy/readers/goes_imager_nc.py +++ b/satpy/readers/goes_imager_nc.py @@ -733,9 +733,9 @@ def _get_area_def_uniform_sampling(self, lon0, channel): def start_time(self): """Start timestamp of the dataset.""" dt = self.nc['time'].dt - return datetime(year=dt.year, month=dt.month, day=dt.day, - hour=dt.hour, minute=dt.minute, - second=dt.second, microsecond=dt.microsecond) + return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day), + hour=int(dt.hour), minute=int(dt.minute), + second=int(dt.second), microsecond=int(dt.microsecond)) @property def end_time(self): @@ -944,10 +944,7 @@ def _update_metadata(self, data, ds_info): # Attributes only available for full disc images. 
YAML reader # doesn't like it if satellite_* is present but None data.attrs.update( - {'satellite_longitude': self.meta['lon0'], - 'satellite_latitude': self.meta['lat0'], - 'satellite_altitude': ALTITUDE, - 'nadir_row': self.meta['nadir_row'], + {'nadir_row': self.meta['nadir_row'], 'nadir_col': self.meta['nadir_col'], 'area_def_uniform_sampling': self.meta['area_def_uni']} ) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 731611765c..cd47daa71f 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -91,7 +91,7 @@ def _find_and_run_interpolation(interpolation_functions, src_resolution, dst_res class HDFEOSBaseFileReader(BaseFileHandler): """Base file handler for HDF EOS data for both L1b and L2 products.""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize the base reader.""" BaseFileHandler.__init__(self, filename, filename_info, filetype_info) try: @@ -291,9 +291,9 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader): 'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'), } - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, **kwargs): """Initialize the geographical reader.""" - HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info) + HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info, **kwargs) self.cache = {} @staticmethod diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py index dabb18d47e..ccc29e617b 100644 --- a/satpy/readers/hrit_base.py +++ b/satpy/readers/hrit_base.py @@ -32,7 +32,7 @@ import os from datetime import timedelta from io import BytesIO -from subprocess import PIPE, Popen +from subprocess import PIPE, Popen # nosec B404 from tempfile import gettempdir import dask.array as da @@ -40,6 +40,7 @@ import xarray as xr from pyresample import geometry +import satpy.readers.utils as utils from satpy.readers.eum_base import time_cds_short from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import dec10216 @@ -132,7 +133,7 @@ def decompress(infile, outdir='.'): cwd = os.getcwd() os.chdir(outdir) - p = Popen([cmd, infile], stdout=PIPE) + p = Popen([cmd, infile], stdout=PIPE) # nosec B603 stdout = BytesIO(p.communicate()[0]) status = p.returncode os.chdir(cwd) @@ -148,6 +149,18 @@ def decompress(infile, outdir='.'): return os.path.join(outdir, outfile.decode('utf-8')) +def get_header_id(fp): + """Return the HRIT header common data.""" + data = fp.read(common_hdr.itemsize) + return np.frombuffer(data, dtype=common_hdr, count=1)[0] + + +def get_header_content(fp, header_dtype, count=1): + """Return the content of the HRIT header.""" + data = fp.read(header_dtype.itemsize*count) + return np.frombuffer(data, dtype=header_dtype, count=count) + + class HRITFileHandler(BaseFileHandler): """HRIT standard format reader.""" @@ -175,17 +188,15 @@ def _get_hd(self, hdr_info): """Open the file, read and get the basic file header info and set the mda dictionary.""" hdr_map, variable_length_headers, text_headers = hdr_info - with open(self.filename) as fp: + with utils.generic_open(self.filename, mode='rb') as fp: total_header_length = 16 while fp.tell() < total_header_length: - hdr_id = np.fromfile(fp, dtype=common_hdr, count=1)[0] + hdr_id = get_header_id(fp) the_type = hdr_map[hdr_id['hdr_id']] if the_type in variable_length_headers: field_length = int((hdr_id['record_length'] - 3) 
/ the_type.itemsize) - current_hdr = np.fromfile(fp, - dtype=the_type, - count=field_length) + current_hdr = get_header_content(fp, the_type, field_length) key = variable_length_headers[the_type] if key in self.mda: if not isinstance(self.mda[key], list): @@ -198,14 +209,10 @@ def _get_hd(self, hdr_info): the_type.itemsize) char = list(the_type.fields.values())[0][0].char new_type = np.dtype(char + str(field_length)) - current_hdr = np.fromfile(fp, - dtype=new_type, - count=1)[0] + current_hdr = get_header_content(fp, new_type)[0] self.mda[text_headers[the_type]] = current_hdr else: - current_hdr = np.fromfile(fp, - dtype=the_type, - count=1)[0] + current_hdr = get_header_content(fp, the_type)[0] self.mda.update( dict(zip(current_hdr.dtype.names, current_hdr))) @@ -311,6 +318,33 @@ def get_area_def(self, dsid): self.area = area return area + def _memmap_data(self, shape, dtype): + # For reading the image data, unzip_context is faster than generic_open + with utils.unzip_context(self.filename) as fn: + return np.memmap(fn, mode='r', + offset=self.mda['total_header_length'], + dtype=dtype, + shape=shape) + + def _read_file_or_file_like(self, shape, dtype): + # filename is likely to be a file-like object, already in memory + with utils.generic_open(self.filename, mode="rb") as fp: + no_elements = np.prod(shape) + fp.seek(self.mda['total_header_length']) + return np.frombuffer( + fp.read(np.dtype(dtype).itemsize * no_elements), + dtype=np.dtype(dtype), + count=no_elements.item() + ).reshape(shape) + + def _read_or_memmap_data(self, shape, dtype): + # Check whether 'filename' is a file on disk + # or a file-like object, possibly already residing in memory + try: + return self._memmap_data(shape, dtype) + except (FileNotFoundError, AttributeError): + return self._read_file_or_file_like(shape, dtype) + def read_band(self, key, info): """Read the data.""" shape = int(np.ceil(self.mda['data_field_length'] / 8.)) @@ -320,10 +354,7 @@ def read_band(self, key, info): elif self.mda['number_of_bits_per_pixel'] in [8, 10]: dtype = np.uint8 shape = (shape, ) - data = np.memmap(self.filename, mode='r', - offset=self.mda['total_header_length'], - dtype=dtype, - shape=shape) + data = self._read_or_memmap_data(shape, dtype) data = da.from_array(data, chunks=shape[0]) if self.mda['number_of_bits_per_pixel'] == 10: data = dec10216(data) diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index b19aaedd11..a89724b5b7 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -61,9 +61,6 @@ * y (y) float64 5.5e+06 5.498e+06 5.496e+06 ... -5.496e+06 -5.498e+06 * x (x) float64 -5.498e+06 -5.496e+06 -5.494e+06 ... 5.498e+06 5.5e+06 Attributes: - satellite_longitude: 140.7 - satellite_latitude: 0.0 - satellite_altitude: 35785831.0 orbital_parameters: {'projection_longitude': 140.7, 'projection_latitud... standard_name: toa_brightness_temperature level: None @@ -188,9 +185,36 @@ def mjd2datetime64(mjd): class HRITJMAFileHandler(HRITFileHandler): - """JMA HRIT format reader.""" + """JMA HRIT format reader. - def __init__(self, filename, filename_info, filetype_info): + By default, the reader uses the start time parsed from the filename. To use the exact time computed + from the metadata, the user can define a keyword argument:: + + scene = Scene(filenames=filenames, + reader='ahi_hrit', + reader_kwargs={'use_acquisition_time_as_start_time': True}) + + As this time is different for every channel, time-dependent calculations like SZA correction + can be pretty slow when multiple channels are used.
+ + The exact scanline times are always available as coordinates of the individual channels:: + + scene.load(["B03"]) + print(scene["B03"].coords["acq_time"].data) + + would print something similar to:: + + array(['2021-12-08T06:00:20.131200000', '2021-12-08T06:00:20.191948000', + '2021-12-08T06:00:20.252695000', ..., + '2021-12-08T06:09:39.449390000', '2021-12-08T06:09:39.510295000', + '2021-12-08T06:09:39.571200000'], dtype='datetime64[ns]') + + The first value represents the exact start time, and the last one the exact end time of the data + acquisition. + + """ + + def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_as_start_time=False): """Initialize the reader.""" super(HRITJMAFileHandler, self).__init__(filename, filename_info, filetype_info, @@ -198,6 +222,7 @@ def __init__(self, filename, filename_info, filetype_info): jma_variable_length_headers, jma_text_headers)) + self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no'] self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm'] self.mda['planned_start_segment_number'] = 1 @@ -353,9 +378,6 @@ def get_dataset(self, key, info): # Update attributes res.attrs.update(info) res.attrs['platform_name'] = self.platform - res.attrs['satellite_longitude'] = float(self.mda['projection_parameters']['SSP_longitude']) - res.attrs['satellite_latitude'] = 0. - res.attrs['satellite_altitude'] = float(self.mda['projection_parameters']['h']) res.attrs['orbital_parameters'] = { 'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']), 'projection_latitude': 0., @@ -430,7 +452,9 @@ def calibrate(self, data, calibration): @property def start_time(self): """Get start time of the scan.""" - return self.acq_time[0].astype(datetime) + if self._use_acquisition_time_as_start_time: + return self.acq_time[0].astype(datetime) + return self._start_time @property def end_time(self): diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py index df23cc5548..cbde23559c 100644 --- a/satpy/readers/hrpt.py +++ b/satpy/readers/hrpt.py @@ -41,7 +41,7 @@ from pyorbital.orbital import Orbital from satpy._compat import cached_property -from satpy.readers.aapp_l1b import LINE_CHUNK +from satpy.readers.aapp_l1b import get_avhrr_lac_chunks from satpy.readers.file_handlers import BaseFileHandler logger = logging.getLogger(__name__) @@ -137,6 +137,11 @@ def times(self): """Get the timestamps for each line.""" return time_seconds(self._data["timecode"], self.year) + @cached_property + def _chunks(self): + """Get the best chunks for this data.""" + return get_avhrr_lac_chunks((self._data.shape[0], 2048), float) + @cached_property def _data(self): """Get the data.""" @@ -171,7 +176,7 @@ def get_dataset(self, key, info): def _get_channel_data(self, key): """Get channel data.""" - data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=(LINE_CHUNK, 2048)) + data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=self._chunks) if key['calibration'] != 'counts': if key['name'] in ['1', '2', '3a']: data = self.calibrate_solar_channel(data, key) @@ -184,9 +189,9 @@ def _get_navigation_data(self, key): """Get navigation data.""" lons, lats = self.lons_lats if key['name'] == 'latitude': - data = da.from_array(lats, chunks=(LINE_CHUNK, 2048)) + data = da.from_array(lats, chunks=self._chunks) else: - data = da.from_array(lons, chunks=(LINE_CHUNK, 2048)) + data =
da.from_array(lons, chunks=self._chunks) return data def _get_ch3_mask_or_true(self, key): diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py index f0b5a74e8a..51d6108f3e 100644 --- a/satpy/readers/modis_l1b.py +++ b/satpy/readers/modis_l1b.py @@ -25,11 +25,39 @@ a pattern similar to the following one: .. parsed-literal:: + M[O/Y]D02[1/H/Q]KM.A[date].[time].[collection].[processing_time].hdf Other patterns where "collection" and/or "processing_time" are missing might also work (see the readers yaml file for details). Geolocation files (MOD03) are also supported. - +The IMAPP direct broadcast naming format is also supported with names like: +``a1.12226.1846.1000m.hdf``. + +Saturation Handling +------------------- + +Band 2 of the MODIS sensor is available in 250m, 500m, and 1km resolutions. +The band data may include a special fill value to indicate when the detector +was saturated in the 250m version of the data. When the data is aggregated to +coarser resolutions, this saturation fill value is converted to a +"can't aggregate" fill value. By default, Satpy will replace these fill values +with NaN to indicate they are invalid. This is typically undesired when +generating images for the data as they appear as "holes" in bright clouds. +To control this, the keyword argument ``mask_saturated`` can be passed and set +to ``False`` to set these two fill values to the maximum valid value. + +.. code-block:: python + + scene = satpy.Scene(filenames=filenames, + reader='modis_l1b', + reader_kwargs={'mask_saturated': False}) + scene.load(['2']) + +Note that the saturation fill value can appear in other bands (e.g. bands 7-19) +in addition to band 2. Also, the "can't aggregate" fill value is a generic +"catch all" for any problems encountered when aggregating high resolution bands +to lower resolutions. Filling this with the max valid value could replace +non-saturated invalid pixels with valid values.
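The saturation handling described above amounts to two small masking steps, implemented further down in this patch as ``_fill_saturated`` and ``_mask_invalid``. A minimal sketch of that logic, with made-up pixel values and valid range (not part of the patch):

.. code-block:: python

    import numpy as np
    import xarray as xr

    # Hypothetical band counts: one saturated pixel (65533) and one
    # "can't aggregate" pixel (65528); valid range as read from file attributes.
    counts = xr.DataArray(
        np.array([[100, 65533], [65528, 32000]], dtype=np.uint16)
    ).astype(np.float32)
    valid_min, valid_max = np.float32(0), np.float32(32767)

    def handle_fills(array, mask_saturated=True):
        if not mask_saturated:
            # Keep saturated/can't-aggregate pixels by clipping them to valid_max.
            array = array.where((array != 65533) & (array != 65528), valid_max)
        # Anything still outside the valid range becomes NaN.
        return array.where((array >= valid_min) & (array <= valid_max))

    print(handle_fills(counts))                        # both fill values -> NaN
    print(handle_fills(counts, mask_saturated=False))  # both fill values -> 32767.0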
Geolocation files ----------------- @@ -62,9 +90,20 @@ class HDFEOSBandReader(HDFEOSBaseFileReader): "Q": 250, "H": 500} - def __init__(self, filename, filename_info, filetype_info): + res_to_possible_variable_names = { + 1000: ['EV_250_Aggr1km_RefSB', + 'EV_500_Aggr1km_RefSB', + 'EV_1KM_RefSB', + 'EV_1KM_Emissive'], + 500: ['EV_250_Aggr500_RefSB', + 'EV_500_RefSB'], + 250: ['EV_250_RefSB'], + } + + def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, **kwargs): """Init the file handler.""" - HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info, **kwargs) + self._mask_saturated = mask_saturated ds = self.metadata['INVENTORYMETADATA'][ 'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE'] @@ -72,120 +111,145 @@ def __init__(self, filename, filename_info, filetype_info): def get_dataset(self, key, info): """Read data from file and return the corresponding projectables.""" - datadict = { - 1000: ['EV_250_Aggr1km_RefSB', - 'EV_500_Aggr1km_RefSB', - 'EV_1KM_RefSB', - 'EV_1KM_Emissive'], - 500: ['EV_250_Aggr500_RefSB', - 'EV_500_RefSB'], - 250: ['EV_250_RefSB']} - if self.resolution != key['resolution']: return - - datasets = datadict[self.resolution] - for dataset in datasets: - subdata = self.sd.select(dataset) + var_name, band_index = self._get_band_variable_name_and_index(key["name"]) + subdata = self.sd.select(var_name) + var_attrs = subdata.attributes() + uncertainty = self.sd.select(var_name + "_Uncert_Indexes") + array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[band_index, :, :], + dims=['y', 'x']).astype(np.float32) + valid_range = var_attrs['valid_range'] + valid_min = np.float32(valid_range[0]) + valid_max = np.float32(valid_range[1]) + if not self._mask_saturated: + array = self._fill_saturated(array, valid_max) + array = self._mask_invalid(array, valid_min, valid_max) + array = self._mask_uncertain_pixels(array, uncertainty, band_index) + projectable = self._calibrate_data(key, info, array, var_attrs, band_index) + + # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or + # (platform_name == 'Terra' and key['name'] in ["29"])): + # height, width = projectable.shape + # row_indices = projectable.mask.sum(1) == width + # if row_indices.sum() != height: + # projectable.mask[row_indices, :] = True + + # Get the orbit number + # if not satscene.orbit: + # mda = self.data.attributes()["CoreMetadata.0"] + # orbit_idx = mda.index("ORBITNUMBER") + # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116] + + # Trimming out dead sensor lines (detectors) on terra: + # (in addition channel 27, 30, 34, 35, and 36 are noisy) + # if satscene.satname == "terra": + # for band in ["29"]: + # if not satscene[band].is_loaded() or satscene[band].data.mask.all(): + # continue + # width = satscene[band].data.shape[1] + # height = satscene[band].data.shape[0] + # indices = satscene[band].data.mask.sum(1) < width + # if indices.sum() == height: + # continue + # satscene[band] = satscene[band].data[indices, :] + # satscene[band].area = geometry.SwathDefinition( + # lons=satscene[band].area.lons[indices, :], + # lats=satscene[band].area.lats[indices, :]) + self._add_satpy_metadata(key, projectable) + return projectable + + def _get_band_variable_name_and_index(self, band_name): + variable_names = self.res_to_possible_variable_names[self.resolution] + for variable_name in variable_names: + subdata = self.sd.select(variable_name) var_attrs = subdata.attributes() - band_names =
var_attrs["band_names"].split(",") - - # get the relative indices of the desired channel try: - index = band_names.index(key['name']) + band_index = self._get_band_index(var_attrs, band_name) except ValueError: + # can't find band in list of bands continue - uncertainty = self.sd.select(dataset + "_Uncert_Indexes") - array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :], - dims=['y', 'x']).astype(np.float32) - valid_range = var_attrs['valid_range'] - - # Fill values: - # Data Value Meaning - # 65535 Fill Value (includes reflective band data at night mode - # and completely missing L1A scans) - # 65534 L1A DN is missing within a scan - # 65533 Detector is saturated - # 65532 Cannot compute zero point DN, e.g., SV is saturated - # 65531 Detector is dead (see comments below) - # 65530 RSB dn** below the minimum of the scaling range - # 65529 TEB radiance or RSB dn** exceeds the maximum of the - # scaling range - # 65528 Aggregation algorithm failure - # 65527 Rotation of Earth view Sector from nominal science - # collection position - # 65526 Calibration coefficient b1 could not be computed - # 65525 Subframe is dead - # 65524 Both sides of the PCLW electronics on simultaneously - # 65501 - 65523 (reserved for future use) - # 65500 NAD closed upper limit - - array = array.where(array >= np.float32(valid_range[0])) - array = array.where(array <= np.float32(valid_range[1])) - array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15) - - if key['calibration'] == 'brightness_temperature': - projectable = calibrate_bt(array, var_attrs, index, key['name']) - info.setdefault('units', 'K') - info.setdefault('standard_name', 'toa_brightness_temperature') - elif key['calibration'] == 'reflectance': - projectable = calibrate_refl(array, var_attrs, index) - info.setdefault('units', '%') - info.setdefault('standard_name', - 'toa_bidirectional_reflectance') - elif key['calibration'] == 'radiance': - projectable = calibrate_radiance(array, var_attrs, index) - info.setdefault('units', var_attrs.get('radiance_units')) - info.setdefault('standard_name', - 'toa_outgoing_radiance_per_unit_wavelength') - elif key['calibration'] == 'counts': - projectable = calibrate_counts(array, var_attrs, index) - info.setdefault('units', 'counts') - info.setdefault('standard_name', 'counts') # made up - else: - raise ValueError("Unknown calibration for " - "key: {}".format(key)) - projectable.attrs = info - - # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or - # (platform_name == 'Terra' and key['name'] in ["29"])): - # height, width = projectable.shape - # row_indices = projectable.mask.sum(1) == width - # if row_indices.sum() != height: - # projectable.mask[row_indices, :] = True - - # Get the orbit number - # if not satscene.orbit: - # mda = self.data.attributes()["CoreMetadata.0"] - # orbit_idx = mda.index("ORBITNUMBER") - # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116] - - # Trimming out dead sensor lines (detectors) on terra: - # (in addition channel 27, 30, 34, 35, and 36 are nosiy) - # if satscene.satname == "terra": - # for band in ["29"]: - # if not satscene[band].is_loaded() or satscene[band].data.mask.all(): - # continue - # width = satscene[band].data.shape[1] - # height = satscene[band].data.shape[0] - # indices = satscene[band].data.mask.sum(1) < width - # if indices.sum() == height: - # continue - # satscene[band] = satscene[band].data[indices, :] - # satscene[band].area = geometry.SwathDefinition( - # lons=satscene[band].area.lons[indices, 
:], - # lats=satscene[band].area.lats[indices, :]) - self._add_satpy_metadata(key, projectable) - return projectable + return variable_name, band_index + + def _get_band_index(self, var_attrs, band_name): + """Get the relative indices of the desired channel.""" + band_names = var_attrs["band_names"].split(",") + index = band_names.index(band_name) + return index + + def _fill_saturated(self, array, valid_max): + """Replace saturation-related values with max reflectance. + + If the file handler was created with ``mask_saturated`` set to + ``True`` then all invalid/fill values are set to NaN. If ``False`` + then the fill values 65528 and 65533 are set to the maximum valid + value. These values correspond to "can't aggregate" and "saturation". + + Fill values: + + * 65535 Fill Value (includes reflective band data at night mode + and completely missing L1A scans) + * 65534 L1A DN is missing within a scan + * 65533 Detector is saturated + * 65532 Cannot compute zero point DN, e.g., SV is saturated + * 65531 Detector is dead (see comments below) + * 65530 RSB dn** below the minimum of the scaling range + * 65529 TEB radiance or RSB dn exceeds the maximum of the scaling range + * 65528 Aggregation algorithm failure + * 65527 Rotation of Earth view Sector from nominal science collection position + * 65526 Calibration coefficient b1 could not be computed + * 65525 Subframe is dead + * 65524 Both sides of the PCLW electronics on simultaneously + * 65501 - 65523 (reserved for future use) + * 65500 NAD closed upper limit + + """ + return array.where((array != 65533) & (array != 65528), valid_max) + + def _mask_invalid(self, array, valid_min, valid_max): + """Replace fill values with NaN.""" + return array.where((array >= valid_min) & (array <= valid_max)) + + def _mask_uncertain_pixels(self, array, uncertainty, band_index): + if not self._mask_saturated: + return array + band_uncertainty = from_sds(uncertainty, chunks=CHUNK_SIZE)[band_index, :, :] + array = array.where(band_uncertainty < 15) + return array + + def _calibrate_data(self, key, info, array, var_attrs, index): + if key['calibration'] == 'brightness_temperature': + projectable = calibrate_bt(array, var_attrs, index, key['name']) + info.setdefault('units', 'K') + info.setdefault('standard_name', 'toa_brightness_temperature') + elif key['calibration'] == 'reflectance': + projectable = calibrate_refl(array, var_attrs, index) + info.setdefault('units', '%') + info.setdefault('standard_name', + 'toa_bidirectional_reflectance') + elif key['calibration'] == 'radiance': + projectable = calibrate_radiance(array, var_attrs, index) + info.setdefault('units', var_attrs.get('radiance_units')) + info.setdefault('standard_name', + 'toa_outgoing_radiance_per_unit_wavelength') + elif key['calibration'] == 'counts': + projectable = calibrate_counts(array, var_attrs, index) + info.setdefault('units', 'counts') + info.setdefault('standard_name', 'counts') # made up + else: + raise ValueError("Unknown calibration for " + "key: {}".format(key)) + projectable.attrs = info + return projectable class MixedHDFEOSReader(HDFEOSGeoReader, HDFEOSBandReader): """A file handler for the files that have both regular bands and geographical information in them.""" - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, **kwargs): """Init the file handler.""" - HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info) - HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info) + 
HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info, **kwargs) + HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info, **kwargs) def get_dataset(self, key, info): """Get the dataset.""" diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py new file mode 100644 index 0000000000..df06239b43 --- /dev/null +++ b/satpy/readers/msu_gsa_l1b.py @@ -0,0 +1,111 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>. +"""Reader for the Arctica-M1 MSU-GS/A data. + +The files for this reader are HDF5 and contain channel data at 1km resolution +for the VIS channels and 4km resolution for the IR channels. Geolocation data +is available at both resolutions, as is sun and satellite geometry. + +This reader was tested on sample data provided by EUMETSAT. + +""" +from datetime import datetime + +import numpy as np + +from satpy.readers.hdf5_utils import HDF5FileHandler + + +class MSUGSAFileHandler(HDF5FileHandler): + """MSU-GS/A L1B file reader.""" + + @property + def start_time(self): + """Time for timeslot scan start.""" + dtstr = self['/attr/timestamp_without_timezone'] + return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S") + + @property + def satellite_altitude(self): + """Satellite altitude at time of scan. + + There is no documentation, but this appears to be + the height above the surface in meters.
+ """ + return float(self['/attr/satellite_observation_point_height']) + + @property + def satellite_latitude(self): + """Satellite latitude at time of scan.""" + return float(self['/attr/satellite_observation_point_latitude']) + + @property + def satellite_longitude(self): + """Satellite longitude at time of scan.""" + return float(self['/attr/satellite_observation_point_longitude']) + + @property + def sensor_name(self): + """Sensor name is hardcoded.""" + sensor = 'msu_gsa' + return sensor + + @property + def platform_name(self): + """Platform name is also hardcoded.""" + platform = 'Arctica-M-N1' + return platform + + @staticmethod + def _apply_scale_offset(in_data): + """Apply the scale and offset to data.""" + scl = in_data.attrs['scale'] + off = in_data.attrs['offset'] + return in_data * scl + off + + def get_dataset(self, dataset_id, ds_info): + """Load data variable and metadata and calibrate if needed.""" + file_key = ds_info.get('file_key', dataset_id['name']) + data = self[file_key] + attrs = data.attrs.copy() # avoid contaminating other band loading + attrs.update(ds_info) + + # The fill value also needs to be applied + fill_val = attrs.pop('fill_value') + data = data.where(data != fill_val, np.nan) + + # Data has a scale and offset that we must apply + data = self._apply_scale_offset(data) + + # Data is given as radiance values, we must convert if we want reflectance + if dataset_id.get('calibration') == "reflectance": + solconst = float(attrs.pop('F_solar_constant')) + data = np.pi * data / solconst + # Satpy expects reflectance values in 0-100 range + data = data * 100. + + data.attrs = attrs + data.attrs.update({ + 'platform_name': self.platform_name, + 'sensor': self.sensor_name, + 'sat_altitude': self.satellite_altitude, + 'sat_latitude': self.satellite_latitude, + 'sat_longitude': self.satellite_longitude, + }) + + return data diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 54d244b161..de385f67fc 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -153,8 +153,8 @@ """ import abc +import functools import warnings -from functools import lru_cache import dask.array as da import numpy as np @@ -565,6 +565,13 @@ def __init__(self, filename, filename_info, filetype_info, self.projection_longitude = float(filename_info['projection_longitude']) self.calib_coefs = self._get_calib_coefs() + self._get_angles = functools.lru_cache(maxsize=8)( + self._get_angles_uncached + ) + self._get_acq_time = functools.lru_cache(maxsize=3)( + self._get_acq_time_uncached + ) + def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" name = dataset_id['name'] @@ -605,8 +612,7 @@ def _get_channel(self, name, resolution, calibration): ds['acq_time'] = self._get_acq_time(resolution) return ds - @lru_cache(maxsize=8) # 4 angle datasets with two resolutions each - def _get_angles(self, name, resolution): + def _get_angles_uncached(self, name, resolution): """Get angle dataset. Files provide angles (solar/satellite zenith & azimuth) at a coarser @@ -689,8 +695,7 @@ def _get_calib_coefs(self): return coefs - @lru_cache(maxsize=3) # Three channels - def _get_acq_time(self, resolution): + def _get_acq_time_uncached(self, resolution): """Get scanline acquisition time for the given resolution. 
Note that the acquisition time does not increase monotonically diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py index ca4efcc091..9bdafbec7c 100644 --- a/satpy/readers/nwcsaf_nc.py +++ b/satpy/readers/nwcsaf_nc.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2017-2020 Satpy developers +# Copyright (c) 2017-2022 Satpy developers # # This file is part of satpy. # @@ -22,6 +22,7 @@ """ +import functools import logging import os from datetime import datetime @@ -89,6 +90,7 @@ def __init__(self, filename, filename_info, filetype_info): self.pps = False self.platform_name = None self.sensor = None + self.file_key_prefix = filetype_info.get("file_key_prefix", "") try: # NWCSAF/Geo: @@ -102,6 +104,10 @@ def __init__(self, filename, filename_info, filetype_info): self.set_platform_and_sensor(**kwrgs) + self.upsample_geolocation = functools.lru_cache(maxsize=1)( + self._upsample_geolocation_uncached + ) + def set_platform_and_sensor(self, **kwargs): """Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo).""" try: @@ -130,22 +136,37 @@ def get_dataset(self, dsid, info): logger.debug('Get the data set from cache: %s.', dsid_name) return self.cache[dsid_name] if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc: - dsid_name = dsid_name + '_reduced' + # Get full resolution lon,lat from the reduced (tie points) grid + lon, lat = self.upsample_geolocation() + if dsid_name == "lon": + return lon + else: + return lat logger.debug('Reading %s.', dsid_name) - variable = self.nc[dsid_name] + file_key = self._get_filekeys(dsid_name, info) + variable = self.nc[file_key] variable = self.remove_timedim(variable) - variable = self.scale_dataset(dsid, variable, info) + variable = self.scale_dataset(variable, info) - if dsid_name.endswith('_reduced'): - # Get full resolution lon,lat from the reduced (tie points) grid - self.upsample_geolocation(dsid, info) + return variable - return self.cache[dsid['name']] + def _get_varname_in_file(self, info, info_type="file_key"): + if isinstance(info[info_type], list): + for key in info[info_type]: + file_key = self.file_key_prefix + key + if file_key in self.nc: + return file_key + return self.file_key_prefix + info[info_type] - return variable + def _get_filekeys(self, dsid_name, info): + try: + file_key = self._get_varname_in_file(info, info_type="file_key") + except KeyError: + file_key = dsid_name + return file_key - def scale_dataset(self, dsid, variable, info): + def scale_dataset(self, variable, info): """Scale the data set, applying the attributes from the netCDF file. The scale and offset attributes will then be removed from the resulting variable. 
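The scaling described in this docstring follows the usual CF ``scale_factor``/``add_offset`` convention. A minimal sketch with a made-up variable, not the reader's actual implementation:

.. code-block:: python

    import numpy as np
    import xarray as xr

    # Made-up NWC SAF-like variable carrying CF scaling attributes.
    var = xr.DataArray(np.array([0, 100, 200], dtype=np.int16),
                       attrs={"scale_factor": 0.01, "add_offset": 240.0})

    # Apply the attributes, then drop them so they cannot be applied twice.
    scale = var.attrs.pop("scale_factor", 1)
    offset = var.attrs.pop("add_offset", 0)
    var = var * scale + offset
    print(var.values)  # [240. 241. 242.]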
@@ -183,7 +204,7 @@ def scale_dataset(self, dsid, variable, info): if 'standard_name' in info: variable.attrs.setdefault('standard_name', info['standard_name']) - variable = self._adjust_variable_for_legacy_software(variable, dsid) + variable = self._adjust_variable_for_legacy_software(variable) return variable @@ -207,13 +228,14 @@ def _mask_variable(variable): return variable def _prepare_variable_for_palette(self, variable, info): - if 'scale_offset_dataset' in info: - so_dataset = self.nc[info['scale_offset_dataset']] - scale = so_dataset.attrs['scale_factor'] - offset = so_dataset.attrs['add_offset'] - else: + try: + so_dataset = self.nc[self._get_varname_in_file(info, info_type='scale_offset_dataset')] + except KeyError: scale = 1 offset = 0 + else: + scale = so_dataset.attrs['scale_factor'] + offset = so_dataset.attrs['add_offset'] variable.attrs['palette_meanings'] = [int(val) for val in variable.attrs['palette_meanings'].split()] if variable.attrs['palette_meanings'][0] == 1: @@ -225,29 +247,25 @@ def _prepare_variable_for_palette(self, variable, info): variable = variable[idx] return variable - def _adjust_variable_for_legacy_software(self, variable, data_id): - if self.sw_version == 'NWC/PPS version v2014' and data_id['name'] == 'ctth_alti': + def _adjust_variable_for_legacy_software(self, variable): + if self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('standard_name') == 'cloud_top_altitude': # pps 2014 valid range and palette don't match variable.attrs['valid_range'] = (0., 9000.) - if self.sw_version == 'NWC/PPS version v2014' and data_id['name'] == 'ctth_alti_pal': + if (self.sw_version == 'NWC/PPS version v2014' and + variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'): # pps 2014 palette has the nodata color (black) first variable = variable[1:, :] - if self.sw_version == 'NWC/GEO version v2016' and data_id['name'] == 'ctth_alti': - # Geo 2016/18 valid range and palette don't match - # Valid range is 0 to 27000 in the file. But after scaling the valid range becomes -2000 to 25000 - # This now fixed by the scaling of the valid range above. - pass return variable - def upsample_geolocation(self, dsid, info): + def _upsample_geolocation_uncached(self): """Upsample the geolocation (lon,lat) from the tiepoint grid.""" from geotiepoints import SatelliteInterpolator # Read the fields needed: col_indices = self.nc['nx_reduced'].values row_indices = self.nc['ny_reduced'].values - lat_reduced = self.scale_dataset(dsid, self.nc['lat_reduced'], info) - lon_reduced = self.scale_dataset(dsid, self.nc['lon_reduced'], info) + lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {}) + lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {}) shape = (self.nc['y'].shape[0], self.nc['x'].shape[0]) cols_full = np.arange(shape[1]) @@ -259,10 +277,9 @@ def upsample_geolocation(self, dsid, info): (rows_full, cols_full)) lons, lats = satint.interpolate() - self.cache['lon'] = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x']) - self.cache['lat'] = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x']) - - return + lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x']) + lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x']) + return lon, lat def get_area_def(self, dsid): """Get the area definition of the datasets in the file. 
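A pattern recurring in this patch (here in ``nwcsaf_nc.py``, in ``mviri_l1b_fiduceo_nc.py`` above, and in ``sar_c_safe.py`` below): ``lru_cache`` is moved from the method definition into ``__init__``. Decorating a method at class level stores ``self`` in a class-wide cache, which can keep file handlers and their open files alive; wrapping the bound method per instance lets the cache be garbage-collected together with the handler. A minimal sketch with hypothetical names:

.. code-block:: python

    import functools

    class SomeFileHandler:
        def __init__(self):
            # The cache lives on the instance, so it dies with the handler
            # instead of pinning ``self`` in a class-level cache.
            self.get_angles = functools.lru_cache(maxsize=8)(
                self._get_angles_uncached
            )

        def _get_angles_uncached(self, resolution):
            print(f"computing angles at {resolution} m")  # expensive work
            return resolution * 2

    fh = SomeFileHandler()
    fh.get_angles(1000)  # computed
    fh.get_angles(1000)  # served from the per-instance cache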
diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py index aa6eb395a2..982e774f77 100644 --- a/satpy/readers/olci_nc.py +++ b/satpy/readers/olci_nc.py @@ -97,11 +97,13 @@ def __getitem__(self, item): class NCOLCIBase(BaseFileHandler): """The OLCI reader base.""" + rows_name = "rows" + cols_name = "columns" + def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the olci reader base.""" - super(NCOLCIBase, self).__init__(filename, filename_info, - filetype_info) + super().__init__(filename, filename_info, filetype_info) self._engine = engine self._start_time = filename_info['start_time'] self._end_time = filename_info['end_time'] @@ -118,9 +120,9 @@ def nc(self): decode_cf=True, mask_and_scale=True, engine=self._engine, - chunks={'columns': CHUNK_SIZE, - 'rows': CHUNK_SIZE}) - return dataset.rename({'columns': 'x', 'rows': 'y'}) + chunks={self.cols_name: CHUNK_SIZE, + self.rows_name: CHUNK_SIZE}) + return dataset.rename({self.cols_name: 'x', self.rows_name: 'y'}) @property def start_time(self): @@ -141,7 +143,7 @@ def get_dataset(self, key, info): def __del__(self): """Close the NetCDF file that may still be open.""" - with suppress(IOError, OSError, AttributeError): + with suppress(IOError, OSError, AttributeError, TypeError): self.nc.close() @@ -156,12 +158,9 @@ class NCOLCIGeo(NCOLCIBase): class NCOLCIChannelBase(NCOLCIBase): """Base class for channel reading.""" - def __init__(self, filename, filename_info, filetype_info, - engine=None): + def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" - super(NCOLCIChannelBase, self).__init__(filename, filename_info, - filetype_info) - + super().__init__(filename, filename_info, filetype_info, engine) self.channel = filename_info.get('dataset_name') @@ -171,8 +170,7 @@ class NCOLCI1B(NCOLCIChannelBase): def __init__(self, filename, filename_info, filetype_info, cal, engine=None): """Init the file handler.""" - super(NCOLCI1B, self).__init__(filename, filename_info, - filetype_info) + super().__init__(filename, filename_info, filetype_info, engine) self.cal = cal.nc @staticmethod @@ -241,33 +239,18 @@ def getbitmask(self, wqsf, items=None): return reduce(np.logical_or, [bflags[item] for item in items]) -class NCOLCILowResData(BaseFileHandler): +class NCOLCILowResData(NCOLCIBase): """Handler for low resolution data.""" + rows_name = "tie_rows" + cols_name = "tie_columns" + def __init__(self, filename, filename_info, filetype_info, engine=None): """Init the file handler.""" - super(NCOLCILowResData, self).__init__(filename, filename_info, filetype_info) - self.nc = None - # TODO: get metadata from the manifest file (xfdumanifest.xml) - self.platform_name = PLATFORM_NAMES[filename_info['mission_id']] - self.sensor = 'olci' - self.cache = {} - self.engine = engine - - def _open_dataset(self): - if self.nc is None: - self.nc = xr.open_dataset(self.filename, - decode_cf=True, - mask_and_scale=True, - engine=self.engine, - chunks={'tie_columns': CHUNK_SIZE, - 'tie_rows': CHUNK_SIZE}) - - self.nc = self.nc.rename({'tie_columns': 'x', 'tie_rows': 'y'}) - - self.l_step = self.nc.attrs['al_subsampling_factor'] - self.c_step = self.nc.attrs['ac_subsampling_factor'] + super().__init__(filename, filename_info, filetype_info, engine) + self.l_step = self.nc.attrs['al_subsampling_factor'] + self.c_step = self.nc.attrs['ac_subsampling_factor'] def _do_interpolate(self, data): @@ -293,16 +276,10 @@ def _do_interpolate(self, data): return [xr.DataArray(da.from_array(x, 
chunks=(CHUNK_SIZE, CHUNK_SIZE)), dims=['y', 'x']) for x in int_data] + @property def _need_interpolation(self): return (self.c_step != 1 or self.l_step != 1) - def __del__(self): - """Close the NetCDF file that may still be open.""" - try: - self.nc.close() - except (OSError, AttributeError): - pass - class NCOLCIAngles(NCOLCILowResData): """File handler for the OLCI angles.""" @@ -317,49 +294,22 @@ def get_dataset(self, key, info): if key['name'] not in self.datasets: return - self._open_dataset() - logger.debug('Reading %s.', key['name']) - if self._need_interpolation() and self.cache.get(key['name']) is None: - + if self._need_interpolation: if key['name'].startswith('satellite'): - zen = self.nc[self.datasets['satellite_zenith_angle']] - zattrs = zen.attrs - azi = self.nc[self.datasets['satellite_azimuth_angle']] - aattrs = azi.attrs + azi, zen = self.satellite_angles elif key['name'].startswith('solar'): - zen = self.nc[self.datasets['solar_zenith_angle']] - zattrs = zen.attrs - azi = self.nc[self.datasets['solar_azimuth_angle']] - aattrs = azi.attrs + azi, zen = self.sun_angles else: raise NotImplementedError("Don't know how to read " + key['name']) - x, y, z = angle2xyz(azi, zen) - - x, y, z = self._do_interpolate((x, y, z)) - - azi, zen = xyz2angle(x, y, z) - azi.attrs = aattrs - zen.attrs = zattrs - if 'zenith' in key['name']: values = zen elif 'azimuth' in key['name']: values = azi else: raise NotImplementedError("Don't know how to read " + key['name']) - - if key['name'].startswith('satellite'): - self.cache['satellite_zenith_angle'] = zen - self.cache['satellite_azimuth_angle'] = azi - elif key['name'].startswith('solar'): - self.cache['solar_zenith_angle'] = zen - self.cache['solar_azimuth_angle'] = azi - - elif key['name'] in self.cache: - values = self.cache[key['name']] else: values = self.nc[self.datasets[key['name']]] @@ -369,12 +319,31 @@ def get_dataset(self, key, info): values.attrs.update(key.to_dict()) return values - def __del__(self): - """Close the NetCDF file that may still be open.""" - try: - self.nc.close() - except (OSError, AttributeError): - pass + @cached_property + def sun_angles(self): + """Return the sun angles.""" + zen = self.nc[self.datasets['solar_zenith_angle']] + azi = self.nc[self.datasets['solar_azimuth_angle']] + azi, zen = self._interpolate_angles(azi, zen) + return azi, zen + + @cached_property + def satellite_angles(self): + """Return the satellite angles.""" + zen = self.nc[self.datasets['satellite_zenith_angle']] + azi = self.nc[self.datasets['satellite_azimuth_angle']] + azi, zen = self._interpolate_angles(azi, zen) + return azi, zen + + def _interpolate_angles(self, azi, zen): + aattrs = azi.attrs + zattrs = zen.attrs + x, y, z = angle2xyz(azi, zen) + x, y, z = self._do_interpolate((x, y, z)) + azi, zen = xyz2angle(x, y, z) + azi.attrs = aattrs + zen.attrs = zattrs + return azi, zen class NCOLCIMeteo(NCOLCILowResData): @@ -382,6 +351,12 @@ class NCOLCIMeteo(NCOLCILowResData): datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone'] + def __init__(self, filename, filename_info, filetype_info, + engine=None): + """Init the file handler.""" + super().__init__(filename, filename_info, filetype_info, engine) + self.cache = {} + # TODO: the following depends on more than columns, rows # float atmospheric_temperature_profile(tie_rows, tie_columns, tie_pressure_levels) ; # float horizontal_wind(tie_rows, tie_columns, wind_vectors) ; @@ -392,11 +367,9 @@ def get_dataset(self, key, info): if key['name'] not in 
self.datasets: return - self._open_dataset() - logger.debug('Reading %s.', key['name']) - if self._need_interpolation() and self.cache.get(key['name']) is None: + if self._need_interpolation and self.cache.get(key['name']) is None: data = self.nc[key['name']] diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 257bcde188..6353a5992d 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -34,11 +34,11 @@ """ +import functools import logging -import xml.etree.ElementTree as ET -from functools import lru_cache from threading import Lock +import defusedxml.ElementTree as ET import numpy as np import rasterio import rioxarray @@ -126,6 +126,14 @@ def end_time(self): class SAFEXMLAnnotation(SAFEXML): """XML file reader for the SAFE format, Annotation file.""" + def __init__(self, filename, filename_info, filetype_info, + header_file=None): + """Init the XML annotation reader.""" + super().__init__(filename, filename_info, filetype_info, header_file) + self.get_incidence_angle = functools.lru_cache(maxsize=10)( + self._get_incidence_angle_uncached + ) + def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: @@ -134,8 +142,7 @@ def get_dataset(self, key, info, chunks=None): if key["name"] == "incidence_angle": return self.get_incidence_angle(chunks=chunks or CHUNK_SIZE) - @lru_cache(maxsize=10) - def get_incidence_angle(self, chunks): + def _get_incidence_angle_uncached(self, chunks): """Get the incidence angle array.""" incidence_angle = XMLArray(self.root, ".//geolocationGridPoint", "incidenceAngle") return incidence_angle.expand(self._image_shape, chunks=chunks) @@ -144,6 +151,14 @@ def get_incidence_angle(self, chunks): class SAFEXMLCalibration(SAFEXML): """XML file reader for the SAFE format, Calibration file.""" + def __init__(self, filename, filename_info, filetype_info, + header_file=None): + """Init the XML calibration reader.""" + super().__init__(filename, filename_info, filetype_info, header_file) + self.get_calibration = functools.lru_cache(maxsize=10)( + self._get_calibration_uncached + ) + def get_dataset(self, key, info, chunks=None): """Load a dataset.""" if self._polarization != key["polarization"]: @@ -156,8 +171,7 @@ def get_calibration_constant(self): """Load the calibration constant.""" return float(self.root.find('.//absoluteCalibrationConstant').text) - @lru_cache(maxsize=10) - def get_calibration(self, calibration, chunks=None): + def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" calibration_name = _get_calibration_name(calibration) calibration_vector = self._get_calibration_vector(calibration_name, chunks) @@ -178,6 +192,9 @@ def __init__(self, filename, filename_info, filetype_info, super().__init__(filename, filename_info, filetype_info, header_file) self.azimuth_noise_reader = AzimuthNoiseReader(self.root, self._image_shape) + self.get_noise_correction = functools.lru_cache(maxsize=10)( + self._get_noise_correction_uncached + ) def get_dataset(self, key, info, chunks=None): """Load a dataset.""" @@ -186,8 +203,7 @@ def get_dataset(self, key, info, chunks=None): if key["name"] == "noise": return self.get_noise_correction(chunks=chunks or CHUNK_SIZE) - @lru_cache(maxsize=10) - def get_noise_correction(self, chunks=None): + def _get_noise_correction_uncached(self, chunks=None): """Get the noise correction array.""" try: noise = self.read_legacy_noise(chunks) @@ -281,17 +297,21 @@ def _create_dask_slices_from_blocks(self, 
chunks): def _create_dask_slice_from_block_line(self, current_line, chunks): """Create a dask slice from the blocks at the current line.""" - current_blocks = self._find_blocks_covering_line(current_line) - current_blocks.sort(key=(lambda x: x.coords['x'][0])) - - next_line = min((arr.coords['y'][-1] for arr in current_blocks)) - current_y = np.arange(current_line, next_line + 1) - - pieces = [arr.sel(y=current_y) for arr in current_blocks] + pieces = self._get_array_pieces_for_current_line(current_line) dask_pieces = self._get_padded_dask_pieces(pieces, chunks) new_slice = da.hstack(dask_pieces) + return new_slice + def _get_array_pieces_for_current_line(self, current_line): + """Get the array pieces that cover the current line.""" + current_blocks = self._find_blocks_covering_line(current_line) + current_blocks.sort(key=(lambda x: x.coords['x'][0])) + next_line = self._get_next_start_line(current_blocks, current_line) + current_y = np.arange(current_line, next_line) + pieces = [arr.sel(y=current_y) for arr in current_blocks] + return pieces + def _find_blocks_covering_line(self, current_line): """Find the blocks covering a given line.""" current_blocks = [] @@ -300,30 +320,43 @@ def _find_blocks_covering_line(self, current_line): current_blocks.append(block) return current_blocks + def _get_next_start_line(self, current_blocks, current_line): + next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1 + blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line] + if blocks_starting_soon: + next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon)) + next_line = min(next_line, next_start_line) + return next_line + def _get_padded_dask_pieces(self, pieces, chunks): """Get the padded pieces of a slice.""" - dask_pieces = [piece.data for piece in pieces] - self._pad_dask_pieces_before(pieces, dask_pieces, chunks) - self._pad_dask_pieces_after(pieces, dask_pieces, chunks) + pieces = sorted(pieces, key=(lambda x: x.coords['x'][0])) + dask_pieces = [] + previous_x_end = -1 + piece = pieces[0] + next_x_start = piece.coords['x'][0].item() + y_shape = len(piece.coords['y']) + + x_shape = (next_x_start - previous_x_end - 1) + self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) + + for i, piece in enumerate(pieces): + dask_pieces.append(piece.data) + previous_x_end = piece.coords['x'][-1].item() + try: + next_x_start = pieces[i + 1].coords['x'][0].item() + except IndexError: + next_x_start = self._image_shape[1] + + x_shape = (next_x_start - previous_x_end - 1) + self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks) + return dask_pieces @staticmethod - def _pad_dask_pieces_before(pieces, dask_pieces, chunks): - """Pad the dask pieces before.""" - first_x = min(arr.coords['x'][0] for arr in pieces) - if first_x > 0: - missing_x = np.arange(first_x) - missing_y = pieces[0].coords['y'] - new_piece = da.full((len(missing_y), len(missing_x)), np.nan, chunks=chunks) - dask_pieces.insert(0, new_piece) - - def _pad_dask_pieces_after(self, pieces, dask_pieces, chunks): - """Pad the dask pieces after.""" - last_x = max(arr.coords['x'][-1] for arr in pieces) - if last_x < self._image_shape[1] - 1: - missing_x = np.arange(last_x + 1, self._image_shape[1]) - missing_y = pieces[-1].coords['y'] - new_piece = da.full((len(missing_y), len(missing_x)), np.nan, chunks=chunks) + def _fill_dask_pieces(dask_pieces, shape, chunks): + if shape[1] > 0: + new_piece = da.full(shape, np.nan, chunks=chunks) 
dask_pieces.append(new_piece) @@ -527,6 +560,9 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annot self.read_lock = Lock() self.filehandle = rasterio.open(self.filename, 'r', sharing=False) + self.get_lonlatalts = functools.lru_cache(maxsize=2)( + self._get_lonlatalts_uncached + ) def get_dataset(self, key, info): """Load a dataset.""" @@ -599,8 +635,7 @@ def _calibrate(self, dn, chunks, key): data = ((dn + cal_constant) / (cal ** 2)).clip(min=0) return data - @lru_cache(maxsize=2) - def get_lonlatalts(self): + def _get_lonlatalts_uncached(self): """Obtain GCPs and construct latitude and longitude arrays. Args: diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py index 3dd2eb9f62..c8cdb65cf5 100644 --- a/satpy/readers/scmi.py +++ b/satpy/readers/scmi.py @@ -157,9 +157,11 @@ def get_dataset(self, key, info): 'sensor': data.attrs.get('sensor', self.sensor), }) if 'satellite_longitude' in self.nc.attrs: - data.attrs['satellite_longitude'] = self.nc.attrs['satellite_longitude'] - data.attrs['satellite_latitude'] = self.nc.attrs['satellite_latitude'] - data.attrs['satellite_altitude'] = self.nc.attrs['satellite_altitude'] + data.attrs['orbital_parameters'] = { + 'projection_longitude': self.nc.attrs['satellite_longitude'], + 'projection_latitude': self.nc.attrs['satellite_latitude'], + 'projection_altitude': self.nc.attrs['satellite_altitude'], + } scene_id = self.nc.attrs.get('scene_id') if scene_id is not None: diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py new file mode 100644 index 0000000000..708b694fac --- /dev/null +++ b/satpy/readers/seadas_l2.py @@ -0,0 +1,103 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>. +"""Reader for SEADAS L2 products. + +This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS. + +The reader includes an additional keyword argument ``apply_quality_flags`` +which can be used to mask out low-quality pixels based on quality flags +contained in the file (``l2_flags``). This option defaults to ``False``, but +when set to ``True``, the "CHLWARN" pixels of the ``l2_flags`` variable +are masked out. These pixels represent data where the chlorophyll algorithm +warned about the quality of the result.
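A usage sketch for the ``apply_quality_flags`` keyword described above; the filename and the ``chlor_a`` dataset name are illustrative assumptions, not taken from this patch:

.. code-block:: python

    from satpy import Scene

    # Hypothetical SEADAS L2 chlorophyll file.
    scn = Scene(filenames=["A2021001123500.L2_LAC_OC.hdf"],
                reader="seadas_l2",
                reader_kwargs={"apply_quality_flags": True})
    scn.load(["chlor_a"])  # CHLWARN-flagged pixels come back masked (NaN)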
+ +""" + +from datetime import datetime + +from .hdf4_utils import HDF4FileHandler + +TIME_FORMAT = "%Y%j%H%M%S" + + +class SEADASL2HDFFileHandler(HDF4FileHandler): + """Simple handler of SEADAS L2 files.""" + + def __init__(self, filename, filename_info, filetype_info, apply_quality_flags=False): + """Initialize file handler and determine if data quality flags should be applied.""" + super().__init__(filename, filename_info, filetype_info) + self.apply_quality_flags = apply_quality_flags and "l2_flags" in self + + def _add_satpy_metadata(self, data): + data.attrs["sensor"] = self.sensor_names + data.attrs["platform_name"] = self._platform_name() + data.attrs["rows_per_scan"] = self._rows_per_scan() + return data + + def _rows_per_scan(self): + if "modis" in self.sensor_names: + return 10 + if "viirs" in self.sensor_names: + return 16 + raise ValueError(f"Don't know how to read data for sensors: {self.sensor_names}") + + def _platform_name(self): + platform = self["/attr/Mission"] + platform_dict = {'NPP': 'Suomi-NPP', + 'JPSS-1': 'NOAA-20', + 'JPSS-2': 'NOAA-21'} + return platform_dict.get(platform, platform) + + @property + def start_time(self): + """Get the starting observation time of this file's data.""" + start_time = self["/attr/Start Time"] + return datetime.strptime(start_time[:-3], TIME_FORMAT) + + @property + def end_time(self): + """Get the ending observation time of this file's data.""" + end_time = self["/attr/End Time"] + return datetime.strptime(end_time[:-3], TIME_FORMAT) + + @property + def sensor_names(self): + """Get sensor for the current file's data.""" + # Example: MODISA or VIIRSN or VIIRSJ1 + sensor_name = self["/attr/Sensor Name"].lower() + if sensor_name.startswith("modis"): + return {"modis"} + return {"viirs"} + + def get_dataset(self, data_id, dataset_info): + """Get DataArray for the specified DataID.""" + file_key = dataset_info.get("file_key", data_id["name"]) + data = self[file_key] + valid_range = data.attrs["valid_range"] + data = data.where(valid_range[0] <= data) + data = data.where(data <= valid_range[1]) + if self.apply_quality_flags and not ("lon" in file_key or "lat" in file_key): + l2_flags = self["l2_flags"] + mask = (l2_flags & 0b00000000010000000000000000000000) != 0 + data = data.where(~mask) + for attr_name in ("standard_name", "long_name", "units"): + val = data.attrs[attr_name] + if val[-1] == "\x00": + data.attrs[attr_name] = data.attrs[attr_name][:-1] + data = self._add_satpy_metadata(data) + return data diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 37686c0ce9..774f8045d4 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -449,7 +449,12 @@ def get(self): ('TerminationSpace', 'S1'), ('EncodingVersion', np.uint16), ('Channel', np.uint8), - ('Filler', 'S20'), + ('ImageLocation', 'S3'), + ('GsicsCalMode', np.bool), + ('GsicsCalValidity', np.bool), + ('Padding', 'S2'), + ('OffsetToData', np.uint32), + ('Padding2', 'S9'), ('RepeatCycle', 'S15'), ] @@ -526,7 +531,7 @@ def vis_calibrate(self, data, solar_irradiance): """Calibrate to reflectance. 
This uses the method described in Conversion from radiances to - reflectances for SEVIRI warm channels: https://tinyurl.com/y67zhphm + reflectances for SEVIRI warm channels: https://www-cdn.eumetsat.int/files/2020-04/pdf_msg_seviri_rad2refl.pdf """ reflectance = np.pi * data * 100.0 / solar_irradiance return apply_earthsun_distance_correction(reflectance, self._scan_time) @@ -826,39 +831,44 @@ def _get_closest_interval(self, time): return closest_match, distance +# def calculate_area_extent(center_point, north, east, south, west, we_offset, ns_offset, column_step, line_step): def calculate_area_extent(area_dict): """Calculate the area extent seen by a geostationary satellite. Args: area_dict: A dictionary containing the required parameters center_point: Center point for the projection - resolution: Pixel resulution in meters north: Northmost row number east: Eastmost column number west: Westmost column number south: Southmost row number + column_step: Pixel resolution in meters in east-west direction + line_step: Pixel resolution in meters in south-north direction [column_offset: Column offset, defaults to 0 if not given] - [row_offset: Row offset, defaults to 0 if not given] + [line_offset: Line offset, defaults to 0 if not given] Returns: tuple: An area extent for the scene defined by the lower left and upper right corners + # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5. + # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. """ - # For Earth model 2 and full disk resolution center point - # column and row is (1856.5, 1856.5) - # See: MSG Level 1.5 Image Data Format Description, Figure 7 - cp_c = area_dict['center_point'] + area_dict.get('column_offset', 0) - cp_r = area_dict['center_point'] + area_dict.get('row_offset', 0) - - # Calculate column and row for lower left and upper right corners. - ll_c = (area_dict['west'] - cp_c) - ll_r = (area_dict['north'] - cp_r + 1) - ur_c = (area_dict['east'] - cp_c - 1) - ur_r = (area_dict['south'] - cp_r) - - aex = np.array([ll_c, ll_r, ur_c, ur_r]) * area_dict['resolution'] - - return tuple(aex) + center_point = area_dict['center_point'] + east = area_dict['east'] + west = area_dict['west'] + south = area_dict['south'] + north = area_dict['north'] + column_step = area_dict['column_step'] + line_step = area_dict['line_step'] + column_offset = area_dict.get('column_offset', 0) + line_offset = area_dict.get('line_offset', 0) + + ll_c = (center_point - east + 0.5 + column_offset) * column_step + ll_l = (north - center_point + 0.5 + line_offset) * line_step + ur_c = (center_point - west - 0.5 + column_offset) * column_step + ur_l = (south - center_point - 0.5 + line_offset) * line_step + + return (ll_c, ll_l, ur_c, ur_l) def create_coef_dict(coefs_nominal, coefs_gsics, radiance_type, ext_coefs): diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py index 191855fdbc..fc5858fabf 100644 --- a/satpy/readers/seviri_l1b_hrit.py +++ b/satpy/readers/seviri_l1b_hrit.py @@ -38,12 +38,12 @@ H-000-MSG4__-MSG4________-_________-EPI______-201903011200-__ Each image is decomposed into 24 segments (files) for the high-resolution-visible (HRV) channel and 8 segments for other -visible (VIS) and infrared (IR) channels. Additionally there is one prologue and one epilogue file for the entire scan
+visible (VIS) and infrared (IR) channels. Additionally, there is one prologue and one epilogue file for the entire scan which contain global metadata valid for all channels. Reader Arguments ---------------- -Some arguments can be provided to the reader to change it's behaviour. These are +Some arguments can be provided to the reader to change its behaviour. These are provided through the `Scene` instantiation, eg:: Scene(reader="seviri_l1b_hrit", filenames=fnames, reader_kwargs={'fill_hrv': False}) @@ -51,6 +51,16 @@ To see the full list of arguments that can be provided, look into the documentation of :class:`HRITMSGFileHandler`. +Compression +----------- + +This reader accepts compressed HRIT files, ending in ``C_``, like other HRIT readers; see +:class:`satpy.readers.hrit_base.HRITFileHandler`. + +This reader also accepts bzipped files with the extension ``.bz2`` for the prologue, +epilogue, and segment files. + + Example ------- Here is an example how to read the data in satpy: @@ -77,9 +87,6 @@ * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06 Attributes: - satellite_longitude: 0.0 - satellite_latitude: 0.0 - satellite_altitude: 35785831.0 orbital_parameters: {'projection_longitude': 0.0, 'projection_latit... platform_name: Meteosat-11 georef_offset_corrected: True @@ -100,9 +107,80 @@ modifiers: () ancillary_variables: [] +The `filenames` argument can be either a list of strings, as in the example above, or a list of +:class:`satpy.readers.FSFile` objects. FSFiles can be used in conjunction with `fsspec`_, +e.g. to handle in-memory data: + +.. code-block:: python + + import glob + + from fsspec.implementations.memory import MemoryFile, MemoryFileSystem + from satpy import Scene + from satpy.readers import FSFile + + # In this example, we will make use of `MemoryFile`s in a `MemoryFileSystem`. + memory_fs = MemoryFileSystem() + + # Usually, the data already resides in memory. + # For explanatory reasons, we will load the files found with glob in memory, + # and load the scene with FSFiles. + filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*') + fs_files = [] + for fn in filenames: + with open(fn, 'rb') as fh: + fs_files.append(MemoryFile( + fs=memory_fs, + path="{}{}".format(memory_fs.root_marker, fn), + data=fh.read() + )) + fs_files[-1].commit() # commit the file to the filesystem + fs_files = [FSFile(open_file) for open_file in fs_files] # wrap MemoryFiles as FSFiles + # similar to the example above, we pass a list of FSFiles to the `Scene` + scn = Scene(filenames=fs_files, reader='seviri_l1b_hrit') + scn.load(['VIS006', 'IR_108']) + print(scn['IR_108']) + + +Output: + +.. code-block:: none + + + dask.array + Coordinates: + acq_time (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT + * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06 + * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06 + Attributes: + orbital_parameters: {'projection_longitude': 0.0, 'projection_latit... + platform_name: Meteosat-11 + georef_offset_corrected: True + standard_name: brightness_temperature + raw_metadata: {'file_type': 0, 'total_header_length': 6198, '... + wavelength: (9.8, 10.8, 11.8) + units: K + sensor: seviri + platform_name: Meteosat-11 + start_time: 2019-03-01 12:00:09.716000 + end_time: 2019-03-01 12:12:42.946000 + area: Area ID: some_area_name\\nDescription: On-the-fl...
+ name: IR_108 + resolution: 3000.403165817 + calibration: brightness_temperature + polarization: None + level: None + modifiers: () + ancillary_variables: [] + + +References: + - `MSG Level 1.5 Image Data Format Description`_ + .. _MSG Level 1.5 Image Data Format Description: https://www-cdn.eumetsat.int/files/2020-05/pdf_ten_05105_msg_img_data.pdf - +.. _fsspec: + https://filesystem-spec.readthedocs.io """ from __future__ import division @@ -223,17 +301,13 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None): """Initialize the reader.""" - with utils.unzip_context(filename) as fn: - if fn is not None: - self.filename = fn - - super(HRITMSGPrologueFileHandler, self).__init__(self.filename, filename_info, - filetype_info, - (msg_hdr_map, - msg_variable_length_headers, - msg_text_headers)) - self.prologue = {} - self.read_prologue() + super(HRITMSGPrologueFileHandler, self).__init__(filename, filename_info, + filetype_info, + (msg_hdr_map, + msg_variable_length_headers, + msg_text_headers)) + self.prologue = {} + self.read_prologue() service = filename_info['service'] if service == '': @@ -243,13 +317,13 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', def read_prologue(self): """Read the prologue metadata.""" - with open(self.filename, "rb") as fp_: + with utils.generic_open(self.filename, mode="rb") as fp_: fp_.seek(self.mda['total_header_length']) - data = np.fromfile(fp_, dtype=hrit_prologue, count=1) + data = np.frombuffer(fp_.read(hrit_prologue.itemsize), dtype=hrit_prologue, count=1) self.prologue.update(recarray2dict(data)) try: - impf = np.fromfile(fp_, dtype=impf_configuration, count=1)[0] - except IndexError: + impf = np.frombuffer(fp_.read(impf_configuration.itemsize), dtype=impf_configuration, count=1)[0] + except ValueError: logger.info('No IMPF configuration field found in prologue.') else: self.prologue.update(recarray2dict(impf)) @@ -300,16 +374,13 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', ext_calib_coefs=None, include_raw_metadata=False, mda_max_array_size=None, fill_hrv=None): """Initialize the reader.""" - with utils.unzip_context(filename) as fn: - if fn is not None: - self.filename = fn - super(HRITMSGEpilogueFileHandler, self).__init__(self.filename, filename_info, - filetype_info, - (msg_hdr_map, - msg_variable_length_headers, - msg_text_headers)) - self.epilogue = {} - self.read_epilogue() + super(HRITMSGEpilogueFileHandler, self).__init__(filename, filename_info, + filetype_info, + (msg_hdr_map, + msg_variable_length_headers, + msg_text_headers)) + self.epilogue = {} + self.read_epilogue() service = filename_info['service'] if service == '': @@ -319,9 +390,9 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal', def read_epilogue(self): """Read the epilogue metadata.""" - with open(self.filename, "rb") as fp_: + with utils.generic_open(self.filename, mode="rb") as fp_: fp_.seek(self.mda['total_header_length']) - data = np.fromfile(fp_, dtype=hrit_epilogue, count=1) + data = np.frombuffer(fp_.read(hrit_epilogue.itemsize), dtype=hrit_epilogue, count=1) self.epilogue.update(recarray2dict(data)) def reduce(self, max_size): @@ -635,11 +706,6 @@ def _update_attrs(self, res, info): res.attrs['standard_name'] = info['standard_name'] res.attrs['platform_name'] = self.platform_name res.attrs['sensor'] = 'seviri' - res.attrs['satellite_longitude'] = 
self.mda[ - 'projection_parameters']['SSP_longitude'] - res.attrs['satellite_latitude'] = self.mda[ - 'projection_parameters']['SSP_latitude'] - res.attrs['satellite_altitude'] = self.mda['projection_parameters']['h'] res.attrs['orbital_parameters'] = { 'projection_longitude': self.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': self.mda['projection_parameters']['SSP_latitude'], diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py index b5dfeda5e5..38c4360744 100644 --- a/satpy/readers/seviri_l1b_icare.py +++ b/satpy/readers/seviri_l1b_icare.py @@ -254,13 +254,13 @@ def get_dataset(self, ds_id, ds_info): offset = data.attrs.get('add_offset') scale_factor = data.attrs.get('scale_factor') data = data.where(data != fill) - data.values = data.values.astype(np.float32) + data = data.astype(np.float32) if scale_factor is not None and offset is not None: - data.values *= scale_factor - data.values += offset + data = data * scale_factor + data = data + offset # Now we correct range from 0-1 to 0-100 for VIS: if ds_id['name'] in self.ref_bands: - data.values *= 100. + data = data * 100. return data def get_area_def(self, ds_id): diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py index 877c5b9805..41bcad49ec 100644 --- a/satpy/readers/seviri_l1b_native.py +++ b/satpy/readers/seviri_l1b_native.py @@ -49,6 +49,7 @@ OrbitPolynomialFinder, SEVIRICalibrationHandler, add_scanline_acq_time, + calculate_area_extent, create_coef_dict, dec10216, get_cds_time, @@ -136,19 +137,6 @@ def end_time(self): return self.header['15_DATA_HEADER']['ImageAcquisition'][ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd'] - @staticmethod - def _calculate_area_extent(center_point, north, east, south, west, - we_offset, ns_offset, column_step, line_step): - # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 . - # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels. 
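The ``seviri_l1b_icare`` hunk above replaces in-place mutation of ``data.values`` (which forces dask-backed arrays to be computed) with plain xarray arithmetic that stays lazy. A minimal sketch of the pattern, with illustrative fill/scale attributes::

    import dask.array as da
    import numpy as np
    import xarray as xr

    data = xr.DataArray(da.ones((4, 4), chunks=2), dims=('y', 'x'))
    fill, scale_factor, offset = -999.0, 0.01, 273.15  # illustrative values

    data = data.where(data != fill)       # mask fill values, still lazy
    data = data.astype(np.float32)        # lazy dtype cast
    data = data * scale_factor + offset   # lazy scaling, no .values access
    print(type(data.data))                # still a dask array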
- - ll_c = (center_point - east + 0.5 + we_offset) * column_step - ll_l = (north - center_point + 0.5 + ns_offset) * line_step - ur_c = (center_point - west - 0.5 + we_offset) * column_step - ur_l = (south - center_point - 0.5 + ns_offset) * line_step - - return (ll_c, ll_l, ur_c, ur_l) - def _get_data_dtype(self): """Get the dtype of the file based on the actual available channels.""" pkhrec = [ @@ -421,8 +409,19 @@ def get_area_extent(self, dataset_id): nlines = north_bound - south_bound + 1 ncolumns = west_bound - east_bound + 1 - aex = self._calculate_area_extent(center_point, north_bound, east_bound, south_bound, west_bound, - we_offset, ns_offset, column_step, line_step) + + area_dict = {'center_point': center_point, + 'east': east_bound, + 'west': west_bound, + 'south': south_bound, + 'north': north_bound, + 'column_step': column_step, + 'line_step': line_step, + 'column_offset': we_offset, + 'line_offset': ns_offset + } + + aex = calculate_area_extent(area_dict) aex_data['area_extent'].append(aex) aex_data['nlines'].append(nlines) diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py index 9f824bc80d..8e283e1d23 100644 --- a/satpy/readers/seviri_l2_bufr.py +++ b/satpy/readers/seviri_l2_bufr.py @@ -30,8 +30,12 @@ import numpy as np import xarray as xr -from satpy.readers.eum_base import recarray2dict +from satpy import CHUNK_SIZE +from satpy.readers._geos_area import get_geos_area_naming +from satpy.readers.eum_base import get_service_mode, recarray2dict +from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import mpef_product_header +from satpy.resample import get_area_def try: import eccodes as ec @@ -39,12 +43,9 @@ raise ImportError( "Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes") -from satpy import CHUNK_SIZE -from satpy.readers.file_handlers import BaseFileHandler - logger = logging.getLogger('SeviriL2Bufr') -data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0000', 'name': '09'}, +data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0455', 'name': '09'}, 57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}} seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16, @@ -53,9 +54,34 @@ class SeviriL2BufrFileHandler(BaseFileHandler): - """File handler for SEVIRI L2 BUFR products.""" + """File handler for SEVIRI L2 BUFR products. + + **Loading data with AreaDefinition** + + By setting `with_area_definition` to True in the `reader_kwargs`, the dataset is loaded with + an AreaDefinition using a standardized AreaDefinition in areas.yaml. By default, the dataset will + be loaded with a SwathDefinition, i.e. similar to how the data are stored in the BUFR file: + + scene = satpy.Scene(filenames, + reader='seviri_l2_bufr', + reader_kwargs={'with_area_definition': False}) + + **Defining dataset rectification longitude** + + The BUFR data were originally extracted from a rectified two-dimensional grid with a given central longitude + (typically the sub-satellite point). This information is available neither in the file itself nor in the filename (for + files from the EUMETSAT archive). Also, it cannot be reliably derived from all datasets themselves.
Hence, the + rectification longitude can be defined by the user by providing `rectification_longitude` in the `reader_kwargs`: - def __init__(self, filename, filename_info, filetype_info, **kwargs): + scene = satpy.Scene(filenames, + reader='seviri_l2_bufr', + reader_kwargs={'rectification_longitude': 0.0}) + + If not done, default values applicable to the operational grids of the respective SEVIRI instruments will be used. + """ + + def __init__(self, filename, filename_info, filetype_info, with_area_definition=False, + rectification_longitude='default', **kwargs): """Initialise the file handler for SEVIRI L2 BUFR data.""" super(SeviriL2BufrFileHandler, self).__init__(filename, filename_info, @@ -74,6 +100,10 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs): self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name'] self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp'] + if rectification_longitude != 'default': + self.mpef_header['RectificationLongitude'] = f'E{int(rectification_longitude * 10):04d}' + + self.with_adef = with_area_definition self.seg_size = seg_size_dict[filetype_info['file_type']] @property @@ -98,6 +128,13 @@ def ssp_lon(self): ssp_lon = self.mpef_header['RectificationLongitude'] return float(ssp_lon[1:])/10. + def get_area_def(self, key): + """Return the area definition.""" + try: + return self._area_def + except AttributeError: + raise NotImplementedError + def _read_mpef_header(self): """Read MPEF header.""" hdr = np.fromfile(self.filename, mpef_product_header, 1) @@ -150,15 +187,86 @@ def get_array(self, key): return arr def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the BUFR key in dataset_info.""" + """Create dataset. + + Load data from BUFR file using the BUFR key in dataset_info + and create the dataset with or without an AreaDefinition. + + """ arr = self.get_array(dataset_info['key']) - arr[arr == dataset_info['fill_value']] = np.nan - xarr = xr.DataArray(arr, dims=["y"]) + if self.with_adef: + xarr = self.get_dataset_with_area_def(arr, dataset_id) + # coordinates are not relevant when returning data with an AreaDefinition + if 'coordinates' in dataset_info.keys(): + del dataset_info['coordinates'] + else: + xarr = xr.DataArray(arr, dims=["y"]) + + if 'fill_value' in dataset_info: + xarr = xarr.where(xarr != dataset_info['fill_value']) + + self._add_attributes(xarr, dataset_info) + + return xarr + + def get_dataset_with_area_def(self, arr, dataset_id): + """Get dataset with an AreaDefinition.""" + if dataset_id['name'] in ['latitude', 'longitude']: + self.__setattr__(dataset_id['name'], arr) + xarr = xr.DataArray(arr, dims=["y"]) + else: + lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr) + + self._area_def = self._construct_area_def(dataset_id) + icol, irow = self._area_def.get_array_indices_from_lonlat(lons_1d, lats_1d) + + data_2d = np.empty(self._area_def.shape) + data_2d[:] = np.nan + data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask] + + xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=('y', 'x')) + + ntotal = len(icol) + nvalid = len(icol.compressed()) + if nvalid < ntotal: + logging.warning(f'{ntotal-nvalid} out of {ntotal} data points could not be put on ' + f'the grid {self._area_def.area_id}.') + + return xarr + + def _construct_area_def(self, dataset_id): + """Construct a standardized AreaDefinition based on satellite, instrument, resolution and sub-satellite point. 
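The core of ``get_dataset_with_area_def`` above is a scatter of the 1-D BUFR samples into a NaN-initialised 2-D grid. A standalone sketch of that pattern; the pyresample ``AreaDefinition`` ``adef`` and the 1-D ``lons``, ``lats`` and ``values`` arrays are assumed to be given::

    import numpy as np

    # Masked column/row indices per sample; samples outside the grid are masked
    icol, irow = adef.get_array_indices_from_lonlat(lons, lats)

    grid = np.full(adef.shape, np.nan)
    grid[irow.compressed(), icol.compressed()] = values[~irow.mask]

    n_dropped = len(icol) - len(icol.compressed())
    if n_dropped:
        print(f"{n_dropped} samples fell outside the target grid")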
+ + Returns: + AreaDefinition: A pyresample AreaDefinition object containing the area definition. + + """ + res = dataset_id['resolution'] + + area_naming_input_dict = {'platform_name': 'msg', + 'instrument_name': 'seviri', + 'resolution': res, + } + + area_naming = get_geos_area_naming({**area_naming_input_dict, + **get_service_mode('seviri', self.ssp_lon)}) + + # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238 + segments of 3 pixels each). Hence, we need to use corresponding area definitions in areas.yaml + if self.seg_size == 3: + area_naming['area_id'] += '_ext' + area_naming['description'] += ' (extended outside original 3km grid)' + + # Construct AreaDefinition from standardized area definition in areas.yaml. + stand_area_def = get_area_def(area_naming['area_id']) + + return stand_area_def + + def _add_attributes(self, xarr, dataset_info): + """Add dataset attributes to xarray.""" xarr.attrs['sensor'] = 'SEVIRI' xarr.attrs['platform_name'] = self.platform_name xarr.attrs['ssp_lon'] = self.ssp_lon xarr.attrs['seg_size'] = self.seg_size xarr.attrs.update(dataset_info) - - return xarr diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py index e4c436e73a..974e575e08 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/seviri_l2_grib.py @@ -21,11 +21,10 @@ References: FM 92 GRIB Edition 2 https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf - EUMETSAT Product Navigator https://navigator.eumetsat.int/ - """ + import logging from datetime import timedelta @@ -34,7 +33,8 @@ import xarray as xr from satpy import CHUNK_SIZE -from satpy.readers._geos_area import get_area_definition +from satpy.readers._geos_area import get_area_definition, get_geos_area_naming +from satpy.readers.eum_base import get_service_mode from satpy.readers.file_handlers import BaseFileHandler from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, calculate_area_extent @@ -44,7 +44,6 @@ raise ImportError( "Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes") - logger = logging.getLogger(__name__) @@ -56,48 +55,6 @@ def __init__(self, filename, filename_info, filetype_info): super().__init__(filename, filename_info, filetype_info) # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) ec.codes_grib_multi_support_on() - self._read_global_attributes() - - def _read_global_attributes(self): - """Read the global product attributes from the first message. - - Read the information about the date and time of the data product, - the projection and area definition and the number of messages.
- - """ - with open(self.filename, 'rb') as fh: - gid = ec.codes_grib_new_from_file(fh) - - if gid is None: - # Could not obtain a valid message id: set attributes to None, number of messages to 0 - logger.warning("Could not obtain a valid message id in GRIB file") - - self._ssp_lon = None - self._nrows = None - self._ncols = None - self._pdict, self._area_dict = None, None - - return - - # Read SSP and date/time - self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees') - - # Read number of points on the x and y axes - self._nrows = self._get_from_msg(gid, 'Ny') - self._ncols = self._get_from_msg(gid, 'Nx') - - # Creates the projection and area dictionaries - self._pdict, self._area_dict = self._get_proj_area(gid) - - # Determine the number of messages in the product by iterating until an invalid id is obtained - i = 1 - ec.codes_release(gid) - while True: - gid = ec.codes_grib_new_from_file(fh) - if gid is None: - break - ec.codes_release(gid) - i = i+1 @property def start_time(self): @@ -111,10 +68,10 @@ def end_time(self): get_area_def(self, dataset_id): """Return the area definition for a dataset.""" - # The area extension depends on the resolution of the dataset - area_dict = self._area_dict.copy() - area_dict['resolution'] = dataset_id.resolution - area_extent = calculate_area_extent(area_dict) + self._area_dict['column_step'] = dataset_id.resolution + self._area_dict['line_step'] = dataset_id.resolution + + area_extent = calculate_area_extent(self._area_dict) # Call the get_area_definition function to obtain the area area_def = get_area_definition(self._pdict, area_extent) @@ -122,54 +79,80 @@ return area_def def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the parameter_number key in dataset_info.""" + """Get dataset using the parameter_number key in dataset_info. + + In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information + (pdict and area_dict) were computed while initializing the file handler. Also, the code would break out from + the while loop below as soon as the correct parameter_number was found. This has now been revised because the + reader would sometimes give corrupt information about the number of messages in the file and the dataset + dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier + instance.
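The revised ``get_dataset`` below follows the standard eccodes iteration pattern: walk every message in the file, act on those whose ``parameterNumber`` matches, and release every handle. Roughly (the helper name is hypothetical)::

    import eccodes as ec

    def values_for_parameter(path, wanted):
        """Collect the values of every message with a given parameterNumber."""
        ec.codes_grib_multi_support_on()  # multi-field messages, as in the reader
        results = []
        with open(path, 'rb') as fh:
            while True:
                gid = ec.codes_grib_new_from_file(fh)
                if gid is None:  # no more messages in the file
                    break
                try:
                    if ec.codes_get(gid, 'parameterNumber') == wanted:
                        results.append(ec.codes_get_values(gid))
                finally:
                    ec.codes_release(gid)  # matched and unmatched alike
        return results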
+ """ logger.debug('Reading in file to get dataset with parameter number %d.', dataset_info['parameter_number']) xarr = None - + message_found = False with open(self.filename, 'rb') as fh: - # Iterate until a message containing the correct parameter number is found + + # Iterate over all messages and fetch data when the correct parameter number is found while True: gid = ec.codes_grib_new_from_file(fh) if gid is None: - # Could not obtain a valid message ID, break out of the loop - logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", - dataset_info['parameter_number']) + if not message_found: + # Could not obtain a valid message ID from the GRIB file + logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", + dataset_info['parameter_number']) break # Check if the parameter number in the GRIB message corresponds to the required key parameter_number = self._get_from_msg(gid, 'parameterNumber') - if parameter_number != dataset_info['parameter_number']: - # The parameter number is not the correct one, skip to next message - ec.codes_release(gid) - continue + if parameter_number == dataset_info['parameter_number']: + + self._res = dataset_id.resolution + self._read_attributes(gid) + + # Read the missing value + missing_value = self._get_from_msg(gid, 'missingValue') - # Read the missing value - missing_value = self._get_from_msg(gid, 'missingValue') + # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value + xarr = self._get_xarray_from_msg(gid) - # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value - xarr = self._get_xarray_from_msg(gid) + xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) - xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) + ec.codes_release(gid) + + # Combine all metadata into the dataset attributes + xarr.attrs.update(dataset_info) + xarr.attrs.update(self._get_attributes()) - ec.codes_release(gid) + message_found = True - # Combine all metadata into the dataset attributes and break out of the loop - xarr.attrs.update(dataset_info) - xarr.attrs.update(self._get_global_attributes()) - break + else: + # The parameter number is not the correct one, release gid and skip to next message + ec.codes_release(gid) return xarr + def _read_attributes(self, gid): + """Read the parameter attributes from the message and create the projection and area dictionaries.""" + # Read SSP and date/time + self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees') + + # Read number of points on the x and y axes + self._nrows = self._get_from_msg(gid, 'Ny') + self._ncols = self._get_from_msg(gid, 'Nx') + + # Creates the projection and area dictionaries + self._pdict, self._area_dict = self._get_proj_area(gid) + def _get_proj_area(self, gid): """Compute the dictionary with the projection and area definition from a GRIB message. Args: gid: The ID of the GRIB message. - Returns: tuple: A tuple of two dictionaries for the projection and the area definition.
pdict: @@ -188,14 +171,26 @@ def _get_proj_area(self, gid): east: coodinate of the east limit west: coodinate of the west limit south: coodinate of the south limit - """ + # Get name of area definition + area_naming_input_dict = {'platform_name': 'msg', + 'instrument_name': 'seviri', + 'resolution': self._res, + } + + area_naming = get_geos_area_naming({**area_naming_input_dict, + **get_service_mode('seviri', self._ssp_lon)}) + # Read all projection and area parameters from the message - earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m] - earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m] + earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m] + earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m] + + earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) + earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) + nr_in_radius_of_earth = self._get_from_msg(gid, 'NrInRadiusOfEarth') xp_in_grid_lengths = self._get_from_msg(gid, 'XpInGridLengths') - h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] + h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] # Create the dictionary with the projection data pdict = { @@ -205,14 +200,14 @@ 'ssp_lon': self._ssp_lon, 'nlines': self._ncols, 'ncols': self._nrows, - 'a_name': 'geos_seviri', - 'a_desc': 'Calculated area for SEVIRI L2 GRIB product', - 'p_id': 'geos', + 'a_name': area_naming['area_id'], + 'a_desc': area_naming['description'], + 'p_id': "", } # Compute the dictionary with the area extension area_dict = { - 'center_point': xp_in_grid_lengths + 0.5, + 'center_point': xp_in_grid_lengths, 'north': self._nrows, 'east': 1, 'west': self._ncols, @@ -221,15 +216,26 @@ return pdict, area_dict + @staticmethod + def _scale_earth_axis(data): + """Scale Earth axis data to make sure the value matches the expected unit [m]. + + The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This + method provides a flexible temporary workaround by making sure that all earth axis values are scaled such + that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has + been resolved by EUMETSAT this workaround can be removed. + + """ + scale_factor = 10 ** np.ceil(np.log10(1e6/data)) + return data * scale_factor + def _get_xarray_from_msg(self, gid): """Read the values from the GRIB message and return a DataArray object. Args: gid: The ID of the GRIB message. - Returns: DataArray: The array containing the retrieved values. - """ # Data from GRIB message are read into an Xarray... xarr = xr.DataArray(da.from_array(ec.codes_get_values( @@ -237,15 +243,14 @@ return xarr - def _get_global_attributes(self): - """Create a dictionary of global attributes to be added to all datasets. + def _get_attributes(self): + """Create a dictionary of attributes to be added to the dataset. Returns: - dict: A dictionary of global attributes. + dict: A dictionary of parameter attributes.
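A quick numeric check (illustrative inputs) of the power-of-ten workaround in ``_scale_earth_axis`` above: values already on the order of millions of meters are multiplied by 1, while under-scaled values are lifted into that range::

    import numpy as np

    def scale_earth_axis(data):
        scale_factor = 10 ** np.ceil(np.log10(1e6 / data))
        return data * scale_factor

    print(scale_earth_axis(6378140.0))  # already [m]     -> 6378140.0 (x 1)
    print(scale_earth_axis(6378.14))    # km-like input   -> 6378140.0 (x 1e3)
    print(scale_earth_axis(0.0637814))  # 1e8 mis-scaling -> 6378140.0 (x 1e8)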
ssp_lon: longitude of subsatellite point sensor: name of sensor platform_name: name of the platform - """ orbital_parameters = { 'projection_longitude': self._ssp_lon @@ -258,16 +263,15 @@ def _get_global_attributes(self): } return attributes - def _get_from_msg(self, gid, key): + @staticmethod + def _get_from_msg(gid, key): """Get a value from the GRIB message based on the key, return None if missing. Args: gid: The ID of the GRIB message. key: The key of the required attribute. - Returns: The retrieved attribute or None if the key is missing. - """ try: attr = ec.codes_get(gid, key) diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py index 55ccce62f8..507d4448a1 100644 --- a/satpy/readers/slstr_l1b.py +++ b/satpy/readers/slstr_l1b.py @@ -36,23 +36,23 @@ 'S3B': 'Sentinel-3B'} # These are the default channel adjustment factors. -# Defined in the product notice: S3.PN-SLSTR-L1.06 -# https://www4-int.eumetsat.int/media/42788 -CHANCALIB_FACTORS = {'S1_nadir': 1.0, - 'S2_nadir': 1.0, - 'S3_nadir': 1.0, +# Defined in the product notice: S3.PN-SLSTR-L1.08 +# https://sentinel.esa.int/documents/247904/2731673/Sentinel-3A-and-3B-SLSTR-Product-Notice-Level-1B-SL-1-RBT-at-NRT-and-NTC.pdf +CHANCALIB_FACTORS = {'S1_nadir': 0.97, + 'S2_nadir': 0.98, + 'S3_nadir': 0.98, 'S4_nadir': 1.0, - 'S5_nadir': 1.12, - 'S6_nadir': 1.2, + 'S5_nadir': 1.11, + 'S6_nadir': 1.13, 'S7_nadir': 1.0, 'S8_nadir': 1.0, 'S9_nadir': 1.0, - 'S1_oblique': 1.0, - 'S2_oblique': 1.0, - 'S3_oblique': 1.0, + 'S1_oblique': 0.94, + 'S2_oblique': 0.95, + 'S3_oblique': 0.95, 'S4_oblique': 1.0, - 'S5_oblique': 1.15, - 'S6_oblique': 1.26, + 'S5_oblique': 1.04, + 'S6_oblique': 1.07, 'S7_oblique': 1.0, 'S8_oblique': 1.0, 'S9_oblique': 1.0, } @@ -107,7 +107,7 @@ class NCSLSTR1B(BaseFileHandler): By default, the calibration factors recommended by EUMETSAT are applied. This is required as the SLSTR VIS channels are producing slightly incorrect radiances that require adjustment. - Satpy uses the radiance corrections in S3.PN-SLSTR-L1.06, checked 26/10/2020. + Satpy uses the radiance corrections in S3.PN-SLSTR-L1.08, checked 11/03/2022. User-supplied coefficients can be passed via the `user_calibration` kwarg This should be a dict of channel names (such as `S1_nadir`, `S8_oblique`). diff --git a/satpy/readers/slstr_l2.py b/satpy/readers/slstr_l2.py deleted file mode 100644 index 8a23947ef3..0000000000 --- a/satpy/readers/slstr_l2.py +++ /dev/null @@ -1,77 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright (c) 2017 Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
-"""Reader for Sentinel-3 SLSTR SST data.""" - -from datetime import datetime - -import xarray as xr - -from satpy import CHUNK_SIZE -from satpy.readers.file_handlers import BaseFileHandler - - -class SLSTRL2FileHandler(BaseFileHandler): - """File handler for Sentinel-3 SSL L2 netCDF files.""" - - def __init__(self, filename, filename_info, filetype_info, engine=None): - """Initialize the file handler for Sentinel-3 SSL L2 netCDF data.""" - super(SLSTRL2FileHandler, self).__init__(filename, filename_info, filetype_info) - - if filename.endswith('tar'): - import os - import tarfile - import tempfile - with tempfile.TemporaryDirectory() as tempdir: - with tarfile.open(name=filename, mode='r') as tf: - sst_filename = next((name for name in tf.getnames() - if name.endswith('nc') and 'GHRSST-SSTskin' in name)) - tf.extract(sst_filename, tempdir) - fullpath = os.path.join(tempdir, sst_filename) - self.nc = xr.open_dataset(fullpath, - decode_cf=True, - mask_and_scale=True, - engine=engine, - chunks={'ni': CHUNK_SIZE, - 'nj': CHUNK_SIZE}) - else: - self.nc = xr.open_dataset(filename, - decode_cf=True, - mask_and_scale=True, - engine=engine, - chunks={'ni': CHUNK_SIZE, - 'nj': CHUNK_SIZE}) - - self.nc = self.nc.rename({'ni': 'x', 'nj': 'y'}) - self.filename_info['start_time'] = datetime.strptime( - self.nc.start_time, '%Y%m%dT%H%M%SZ') - self.filename_info['end_time'] = datetime.strptime( - self.nc.stop_time, '%Y%m%dT%H%M%SZ') - - def get_dataset(self, key, info): - """Get any available dataset.""" - stdname = info.get('standard_name') - return self.nc[stdname].squeeze() - - @property - def start_time(self): - """Get start time.""" - return self.filename_info['start_time'] - - @property - def end_time(self): - """Get end time.""" - return self.filename_info['end_time'] diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py index cf019a76f9..50e36ec3a5 100644 --- a/satpy/readers/utils.py +++ b/satpy/readers/utils.py @@ -26,7 +26,7 @@ from contextlib import closing from io import BytesIO from shutil import which -from subprocess import PIPE, Popen +from subprocess import PIPE, Popen # nosec import numpy as np import pyproj @@ -104,12 +104,13 @@ def get_geostationary_angle_extent(geos_area): return xmax, ymax -def get_geostationary_mask(area): +def get_geostationary_mask(area, chunks=None): """Compute a mask of the earth's shape as seen by a geostationary satellite. Args: area (pyresample.geometry.AreaDefinition) : Corresponding area definition + chunks (int or tuple): Chunk size for the 2D array that is generated. Returns: Boolean mask, True inside the earth's shape, False outside. 
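As the hunk that follows shows, the mask itself is just an ellipse test in projection coordinates. A numpy-only sketch; the scaled view extents ``xmax``/``ymax`` and the pixel-centre coordinate grids ``x``/``y`` are assumed to be derived from the area definition already::

    def geostationary_mask(x, y, xmax, ymax):
        """True inside the Earth's elliptical disk, False for space pixels."""
        return (x / xmax) ** 2 + (y / ymax) ** 2 <= 1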
@@ -122,7 +123,7 @@ def get_geostationary_mask(area): ymax *= h # Compute projection coordinates at the centre of each pixel - x, y = area.get_proj_coords(chunks=CHUNK_SIZE) + x, y = area.get_proj_coords(chunks=chunks or CHUNK_SIZE) # Compute mask of the earth's elliptical shape return ((x / xmax) ** 2 + (y / ymax) ** 2) <= 1 @@ -199,7 +200,7 @@ def get_sub_area(area, xslice, yslice): def unzip_file(filename): """Unzip the file if file is bzipped = ending with 'bz2'.""" - if filename.endswith('bz2'): + if os.fspath(filename).endswith('bz2'): fdn, tmpfilepath = tempfile.mkstemp() LOGGER.info("Using temp file for BZ2 decompression: %s", tmpfilepath) # try pbzip2 @@ -216,7 +217,7 @@ def unzip_file(filename): runner = [pbzip, '-dc', filename] - p = Popen(runner, stdout=PIPE, stderr=PIPE) + p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec stdout = BytesIO(p.communicate()[0]) status = p.returncode if status != 0: @@ -281,6 +282,32 @@ def __exit__(self, exc_type, exc_value, traceback): os.remove(self.unzipped_filename) +class generic_open(): + """Context manager for opening either a regular file or a bzip2 file.""" + + def __init__(self, filename, *args, **kwargs): + """Keep filename and mode.""" + self.filename = filename + self.open_args = args + self.open_kwargs = kwargs + + def __enter__(self): + """Return a file-like object.""" + if os.fspath(self.filename).endswith('.bz2'): + self.fp = bz2.open(self.filename, *self.open_args, **self.open_kwargs) + else: + if hasattr(self.filename, "open"): + self.fp = self.filename.open(*self.open_args, **self.open_kwargs) + else: + self.fp = open(self.filename, *self.open_args, **self.open_kwargs) + + return self.fp + + def __exit__(self, exc_type, exc_value, traceback): + """Close the file handler.""" + self.fp.close() + + def bbox(img): """Find the bounding box around nonzero elements in the given array. diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py index 02eacaaa59..e1f9f74e76 100644 --- a/satpy/readers/vii_base_nc.py +++ b/satpy/readers/vii_base_nc.py @@ -18,6 +18,7 @@ """EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class.""" + import logging from datetime import datetime @@ -101,11 +102,6 @@ def get_dataset(self, dataset_id, dataset_info): if orthorect_data_name is not None: variable = self._perform_orthorectification(variable, orthorect_data_name) - # If the dataset contains a longitude, change it to the interval [0., 360.) as natively in the product - # since the unwrapping performed during the interpolation might have created values outside this range - if dataset_info.get('standard_name', None) == 'longitude': - variable %= 360. - # Manage the attributes of the dataset variable.attrs.setdefault('units', None) diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py index 2c5063ebb3..2e66c3deb0 100644 --- a/satpy/readers/vii_l1b_nc.py +++ b/satpy/readers/vii_l1b_nc.py @@ -18,9 +18,11 @@ """EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 1B products reader. The ``vii_l1b_nc`` reader reads and calibrates EPS-SG VII L1b image data in netCDF format. The format is explained -in the `EPS-SG VII Level 1B Product Format Specification`_. +in the `EPS-SG VII Level 1B Product Format Specification V4A`_. -.. _EPS-SG VII Level 1B Product Format Specification: https://www.eumetsat.int/media/44393 +This version is applicable for the vii test data V2 to be released in Jan 2022. + +.. 
_EPS-SG VII Level 1B Product Format Specification V4A: https://www.eumetsat.int/media/44393 """ @@ -69,19 +71,18 @@ def _perform_calibration(self, variable, dataset_info): if calibration_name == 'brightness_temperature': # Extract the values of calibration coefficients for the current channel chan_index = dataset_info['chan_thermal_index'] - cw = self._channel_cw_thermal[chan_index] * 1e-3 + cw = self._channel_cw_thermal[chan_index] a = self._bt_conversion_a[chan_index] b = self._bt_conversion_b[chan_index] # Perform the calibration calibrated_variable = self._calibrate_bt(variable, cw, a, b) calibrated_variable.attrs = variable.attrs elif calibration_name == 'reflectance': - scale = 1/(dataset_info['wavelength'][2] - dataset_info['wavelength'][0]) # Extract the values of calibration coefficients for the current channel chan_index = dataset_info['chan_solar_index'] - isi = scale * self._integrated_solar_irradiance[chan_index] + isi = self._integrated_solar_irradiance[chan_index] # Perform the calibration - calibrated_variable = self._calibrate_refl(variable, self.angle_factor, isi) + calibrated_variable = self._calibrate_refl(variable, self.angle_factor.data, isi) calibrated_variable.attrs = variable.attrs elif calibration_name == 'radiance': calibrated_variable = variable @@ -141,5 +142,5 @@ def _calibrate_refl(radiance, angle_factor, isi): numpy ndarray: array containing the calibrated reflectance values. """ - refl_values = (np.pi / isi) * angle_factor * radiance + refl_values = (np.pi / isi) * angle_factor * radiance * 100.0 return refl_values diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py index 6676c8e55e..a265bb1f82 100644 --- a/satpy/readers/viirs_l1b.py +++ b/satpy/readers/viirs_l1b.py @@ -70,11 +70,7 @@ def platform_name(self): @property def sensor_name(self): """Get sensor name.""" - res = self['/attr/instrument'] - if isinstance(res, np.ndarray): - return str(res.astype(str)) - else: - return res + return self['/attr/instrument'].lower() def adjust_scaling_factors(self, factors, file_units, output_units): """Adjust scaling factors.""" @@ -100,7 +96,7 @@ def adjust_scaling_factors(self, factors, file_units, output_units): def get_shape(self, ds_id, ds_info): """Get shape.""" - var_path = ds_info.get('file_key', 'observation_data/{}'.format(ds_id['name'])) + var_path = self._dataset_name_to_var_path(ds_id['name'], ds_info) return self.get(var_path + '/shape', 1) @property @@ -174,12 +170,12 @@ def _get_dataset_valid_range(self, dataset_id, ds_info, var_path): def get_metadata(self, dataset_id, ds_info): """Get metadata.""" - var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name'])) + var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) shape = self.get_shape(dataset_id, ds_info) file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path) # Get extra metadata - if '/dimension/number_of_scans' in self: + if self._is_scan_based_array(shape): rows_per_scan = int(shape[0] / self['/dimension/number_of_scans']) ds_info.setdefault('rows_per_scan', rows_per_scan) @@ -198,9 +194,12 @@ def get_metadata(self, dataset_id, ds_info): i.update(dataset_id.to_dict()) return i + def _is_scan_based_array(self, shape): + return '/dimension/number_of_scans' in self and isinstance(shape, tuple) and shape + def get_dataset(self, dataset_id, ds_info): """Get dataset.""" - var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name'])) + var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info) 
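The ``vii_l1b_nc`` change above drops the band-width scaling of the integrated solar irradiance and returns reflectances in percent, i.e. 100 * pi * L * angle_factor / ISI. A sketch with the argument roles as read from the hunk::

    import numpy as np

    def calibrate_refl(radiance, angle_factor, isi):
        """Reflectance in percent; angle_factor carries the solar-angle term."""
        return (np.pi / isi) * angle_factor * radiance * 100.0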
metadata = self.get_metadata(dataset_id, ds_info) valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path) @@ -240,3 +239,27 @@ if 'number_of_lines' in data.dims: data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'}) return data + + def available_datasets(self, configured_datasets=None): + """Generate dataset info and their availability. + + See + :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets` + for details. + + """ + for is_avail, ds_info in (configured_datasets or []): + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + ft_matches = self.file_type_matches(ds_info['file_type']) + var_path = self._dataset_name_to_var_path(ds_info['name'], ds_info) + is_in_file = var_path in self + yield ft_matches and is_in_file, ds_info + + @staticmethod + def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str: + return ds_info.get('file_key', 'observation_data/{}'.format(dataset_name)) diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py index 82e6b70bb1..a8fd98954e 100644 --- a/satpy/readers/viirs_sdr.py +++ b/satpy/readers/viirs_sdr.py @@ -604,7 +604,7 @@ def _get_file_handlers(self, dsid): LOG.warning("Required file type '%s' not found or loaded for " "'%s'", ds_info['file_type'], dsid['name']) else: - if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1: + if len(set(ds_info['dataset_groups']) & {'GITCO', 'GIMGO', 'GMTCO', 'GMODO'}) > 1: fhs = self.get_right_geo_fhs(dsid, fhs) return fhs diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 3266c32131..2e3203e5a8 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2016-2019 Satpy developers +# Copyright (c) 2016-2019, 2021 Satpy developers # # This file is part of satpy.
# @@ -24,6 +24,7 @@ import warnings from abc import ABCMeta, abstractmethod from collections import OrderedDict, deque +from contextlib import suppress from fnmatch import fnmatch from weakref import WeakValueDictionary @@ -36,6 +37,8 @@ except ImportError: from yaml import Loader as UnsafeLoader # type: ignore +from functools import cached_property + from pyresample.boundary import AreaDefBoundary, Boundary from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition from trollsift.parser import globify, parse @@ -301,7 +304,10 @@ def load_ds_ids_from_config(self): ds_info = dataset.copy() for key in dsid.keys(): if isinstance(ds_info.get(key), dict): - ds_info.update(ds_info[key][dsid.get(key)]) + with suppress(KeyError): + # KeyError is suppressed in case the key does not represent interesting metadata, + # eg a custom type + ds_info.update(ds_info[key][dsid.get(key)]) # this is important for wavelength which was converted # to a tuple ds_info[key] = dsid.get(key) @@ -345,6 +351,10 @@ class FileYAMLReader(AbstractYAMLReader, DataDownloadMixin): """ + # WeakValueDictionary objects must be created at the class level or else + # dask will not be able to serialize them on a distributed environment + _coords_cache: WeakValueDictionary = WeakValueDictionary() + def __init__(self, config_dict, filter_parameters=None, @@ -357,7 +367,6 @@ def __init__(self, self.available_ids = {} self.filter_filenames = self.info.get('filter_filenames', filter_filenames) self.filter_parameters = filter_parameters or {} - self.coords_cache = WeakValueDictionary() self.register_data_files() @property @@ -805,7 +814,7 @@ def _make_swath_definition_from_lons_lats(self, lons, lats): key = None try: key = (lons.data.name, lats.data.name) - sdef = self.coords_cache.get(key) + sdef = FileYAMLReader._coords_cache.get(key) except AttributeError: sdef = None if sdef is None: @@ -816,7 +825,7 @@ def _make_swath_definition_from_lons_lats(self, lons, lats): lons.attrs.get('name', lons.name), lats.attrs.get('name', lats.name)) if key is not None: - self.coords_cache[key] = sdef + FileYAMLReader._coords_cache[key] = sdef return sdef def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs): @@ -1003,6 +1012,11 @@ def _set_orientation(dataset, upper_right_corner): "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) return dataset + if isinstance(dataset.attrs['area'], SwathDefinition): + logger.info("Dataset {} is in a SwathDefinition " + "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name'))) + return dataset + projection_type = _get_projection_type(dataset.attrs['area']) accepted_geos_proj_types = ['Geostationary Satellite (Sweep Y)', 'Geostationary Satellite (Sweep X)'] if projection_type not in accepted_geos_proj_types: @@ -1161,8 +1175,7 @@ def create_filehandlers(self, filenames, fh_kwargs=None): fh.filename_info['segment'] = fh.filename_info.get('count_in_repeat_cycle', 1) return created_fhs - @staticmethod - def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True): + def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True): """Load only a piece of the dataset.""" if not pad_data: return FileYAMLReader._load_dataset(dsid, ds_info, @@ -1175,28 +1188,14 @@ def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True): raise KeyError( "Could not load {} from any provided files".format(dsid)) - padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi' - - empty_segment 
= xr.full_like(projectable, np.nan) + filetype = file_handlers[0].filetype_info['file_type'] + self.empty_segment = xr.full_like(projectable, np.nan) for i, sli in enumerate(slice_list): if sli is None: - if padding_fci_scene: - slice_list[i] = _get_empty_segment_with_height(empty_segment, - _get_FCI_L1c_FDHSI_chunk_height( - empty_segment.shape[1], i + 1), - dim=dim) - else: - slice_list[i] = empty_segment + slice_list[i] = self._get_empty_segment(dim=dim, idx=i, filetype=filetype) while expected_segments > counter: - if padding_fci_scene: - slice_list.append(_get_empty_segment_with_height(empty_segment, - _get_FCI_L1c_FDHSI_chunk_height( - empty_segment.shape[1], counter + 1), - dim=dim)) - else: - slice_list.append(empty_segment) - + slice_list.append(self._get_empty_segment(dim=dim, idx=counter, filetype=filetype)) counter += 1 if dim not in slice_list[0].dims: @@ -1209,97 +1208,114 @@ def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True): res.attrs = combined_info return res + def _get_empty_segment(self, **kwargs): + return self.empty_segment + def _load_area_def(self, dsid, file_handlers, pad_data=True): """Load the area definition of *dsid* with padding.""" if not pad_data: return _load_area_def(dsid, file_handlers) - return _load_area_def_with_padding(dsid, file_handlers) + return self._load_area_def_with_padding(dsid, file_handlers) + def _load_area_def_with_padding(self, dsid, file_handlers): + """Load the area definition of *dsid* with padding.""" + # Pad missing segments between the first available and expected + area_defs = self._pad_later_segments_area(file_handlers, dsid) -def _load_area_def_with_padding(dsid, file_handlers): - """Load the area definition of *dsid* with padding.""" - # Pad missing segments between the first available and expected - area_defs = _pad_later_segments_area(file_handlers, dsid) + # Add missing start segments + area_defs = self._pad_earlier_segments_area(file_handlers, dsid, area_defs) - # Add missing start segments - area_defs = _pad_earlier_segments_area(file_handlers, dsid, area_defs) + # Stack the area definitions + area_def = _stack_area_defs(area_defs) - # Stack the area definitions - area_def = _stack_area_defs(area_defs) + return area_def - return area_def + def _pad_later_segments_area(self, file_handlers, dsid): + """Pad area definitions for missing segments that are later in sequence than the first available.""" + expected_segments = file_handlers[0].filetype_info['expected_segments'] + filetype = file_handlers[0].filetype_info['file_type'] + available_segments = [int(fh.filename_info.get('segment', 1)) for + fh in file_handlers] + area_defs = self._get_segments_areadef_with_later_padded(file_handlers, filetype, dsid, available_segments, + expected_segments) -def _stack_area_defs(area_def_dict): - """Stack given dict of area definitions and return a StackedAreaDefinition.""" - area_defs = [area_def_dict[area_def] for - area_def in sorted(area_def_dict.keys()) - if area_def is not None] + return area_defs - area_def = StackedAreaDefinition(*area_defs) - area_def = area_def.squeeze() + def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, available_segments, + expected_segments): + seg_size = None + area_defs = {} + for segment in range(available_segments[0], expected_segments + 1): + try: + idx = available_segments.index(segment) + fh = file_handlers[idx] + area = fh.get_area_def(dsid) + except ValueError: + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, 
padding_type='later') + + area_defs[segment] = area + seg_size = area.shape + return area_defs + + def _pad_earlier_segments_area(self, file_handlers, dsid, area_defs): + """Pad area definitions for missing segments that are earlier in sequence than the first available.""" + available_segments = [int(fh.filename_info.get('segment', 1)) for + fh in file_handlers] + area = file_handlers[0].get_area_def(dsid) + seg_size = area.shape + filetype = file_handlers[0].filetype_info['file_type'] - return area_def + for segment in range(available_segments[0] - 1, 0, -1): + area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='earlier') + area_defs[segment] = area + seg_size = area.shape + return area_defs -def _pad_later_segments_area(file_handlers, dsid): - """Pad area definitions for missing segments that are later in sequence than the first available.""" - seg_size = None - expected_segments = file_handlers[0].filetype_info['expected_segments'] - available_segments = [int(fh.filename_info.get('segment', 1)) for - fh in file_handlers] - area_defs = {} - padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi' + def _get_new_areadef_for_padded_segment(self, area, filetype, seg_size, segment, padding_type): + logger.debug("Padding to full disk with segment nr. %d", segment) + new_height_px, new_ll_y, new_ur_y = self._get_y_area_extents_for_padded_segment(area, filetype, padding_type, + seg_size, segment) - for segment in range(available_segments[0], expected_segments + 1): - try: - idx = available_segments.index(segment) - fh = file_handlers[idx] - area = fh.get_area_def(dsid) - except ValueError: - logger.debug("Padding to full disk with segment nr. %d", segment) - - new_height_proj_coord, new_height_px = _get_new_areadef_heights(area, seg_size, segment, - padding_fci_scene) + fill_extent = (area.area_extent[0], new_ll_y, + area.area_extent[2], new_ur_y) + area = AreaDefinition('fill', 'fill', 'fill', area.crs, + seg_size[1], new_height_px, + fill_extent) + return area + + def _get_y_area_extents_for_padded_segment(self, area, filetype, padding_type, seg_size, segment): + new_height_proj_coord, new_height_px = self._get_new_areadef_heights(area, seg_size, + segment_n=segment, + filetype=filetype) + if padding_type == 'later': new_ll_y = area.area_extent[1] + new_height_proj_coord new_ur_y = area.area_extent[1] - fill_extent = (area.area_extent[0], new_ll_y, - area.area_extent[2], new_ur_y) - area = AreaDefinition('fill', 'fill', 'fill', area.crs, - seg_size[1], new_height_px, - fill_extent) - - area_defs[segment] = area - seg_size = area.shape + elif padding_type == 'earlier': + new_ll_y = area.area_extent[3] + new_ur_y = area.area_extent[3] - new_height_proj_coord + else: + raise ValueError("Padding type not recognised.") + return new_height_px, new_ll_y, new_ur_y - return area_defs + def _get_new_areadef_heights(self, previous_area, previous_seg_size, **kwargs): + new_height_px = previous_seg_size[0] + new_height_proj_coord = previous_area.area_extent[1] - previous_area.area_extent[3] + return new_height_proj_coord, new_height_px -def _pad_earlier_segments_area(file_handlers, dsid, area_defs): - """Pad area definitions for missing segments that are earlier in sequence than the first available.""" - available_segments = [int(fh.filename_info.get('segment', 1)) for - fh in file_handlers] - area = file_handlers[0].get_area_def(dsid) - seg_size = area.shape - padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 
'fci_l1c_fdhsi' - for segment in range(available_segments[0] - 1, 0, -1): - logger.debug("Padding segment %d to full disk.", - segment) +def _stack_area_defs(area_def_dict): + """Stack given dict of area definitions and return a StackedAreaDefinition.""" + area_defs = [area_def_dict[area_def] for + area_def in sorted(area_def_dict.keys()) + if area_def is not None] - new_height_proj_coord, new_height_px = _get_new_areadef_heights(area, seg_size, segment, padding_fci_scene) - new_ll_y = area.area_extent[3] - new_ur_y = area.area_extent[3] - new_height_proj_coord - fill_extent = (area.area_extent[0], new_ll_y, - area.area_extent[2], new_ur_y) - area = AreaDefinition('fill', 'fill', 'fill', - area.crs, - seg_size[1], new_height_px, - fill_extent) - area_defs[segment] = area - seg_size = area.shape + area_def = StackedAreaDefinition(*area_defs) + area_def = area_def.squeeze() - return area_defs + return area_def def _find_missing_segments(file_handlers, ds_info, dsid): @@ -1336,21 +1352,6 @@ def _find_missing_segments(file_handlers, ds_info, dsid): return counter, expected_segments, slice_list, failure, projectable -def _get_new_areadef_heights(previous_area, previous_seg_size, segment_n, padding_fci_scene): - """Get the area definition heights in projection coordinates and pixels for the new padded segment.""" - if padding_fci_scene: - # retrieve the chunk/segment pixel height - new_height_px = _get_FCI_L1c_FDHSI_chunk_height(previous_seg_size[1], segment_n) - # scale the previous vertical area extent using the new pixel height - new_height_proj_coord = (previous_area.area_extent[1] - previous_area.area_extent[3]) * new_height_px / \ - previous_seg_size[0] - else: - # all other cases have constant segment size, so reuse the previous segment heights - new_height_px = previous_seg_size[0] - new_height_proj_coord = previous_area.area_extent[1] - previous_area.area_extent[3] - return new_height_proj_coord, new_height_px - - def _get_empty_segment_with_height(empty_segment, new_height, dim): """Get a new empty segment with the specified height.""" if empty_segment.shape[0] > new_height: @@ -1362,22 +1363,172 @@ def _get_empty_segment_with_height(empty_segment, new_height, dim): return empty_segment -def _get_FCI_L1c_FDHSI_chunk_height(chunk_width, chunk_n): - """Get the height in pixels of a FCI L1c FDHSI chunk given the chunk width and number (starting from 1).""" - if chunk_width == 11136: - # 1km resolution case - if chunk_n in [3, 5, 8, 10, 13, 15, 18, 20, 23, 25, 28, 30, 33, 35, 38, 40]: - chunk_height = 279 - else: - chunk_height = 278 - elif chunk_width == 5568: - # 2km resolution case - if chunk_n in [5, 10, 15, 20, 25, 30, 35, 40]: - chunk_height = 140 - else: - chunk_height = 139 - else: - raise ValueError("FCI L1c FDHSI chunk width {} not recognized. Must be either 5568 or 11136.".format( - chunk_width)) +class GEOVariableSegmentYAMLReader(GEOSegmentYAMLReader): + """GEOVariableSegmentYAMLReader for handling chunked/segmented GEO products with segments of variable height. + + This YAMLReader overrides parts of the GEOSegmentYAMLReader to account for formats where the segments can + have variable heights. It computes the sizes of the padded segments using the information available in the + file(handlers), so that gaps of any size can be filled as needed. + + This implementation was motivated by the FCI L1c format, where the segments (called chunks in the FCI world) + can have variable heights. It is however generic, so that any future reader can use it. 
The requirement + for the reader is to have a method called `get_segment_position_info`, returning a dictionary containing + the positioning info for each chunk (see example in + :func:`satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info`). + + For more information on please see the documentation of :func:`satpy.readers.yaml_reader.GEOSegmentYAMLReader`. + """ + + def create_filehandlers(self, filenames, fh_kwargs=None): + """Create file handler objects and collect the location information.""" + created_fhs = super().create_filehandlers(filenames, fh_kwargs=fh_kwargs) + self._extract_segment_location_dicts(created_fhs) + return created_fhs + + def _extract_segment_location_dicts(self, created_fhs): + self.segment_infos = dict() + for filetype, filetype_fhs in created_fhs.items(): + self._initialise_segment_infos(filetype, filetype_fhs) + self._collect_segment_position_infos(filetype, filetype_fhs) + return + + def _collect_segment_position_infos(self, filetype, filetype_fhs): + # collect the segment positioning infos for all available segments + for fh in filetype_fhs: + chk_infos = fh.get_segment_position_info() + chk_infos.update({'segment_nr': fh.filename_info['segment'] - 1}) + self.segment_infos[filetype]['available_segment_infos'].append(chk_infos) + + def _initialise_segment_infos(self, filetype, filetype_fhs): + # initialise the segment info for this filetype + exp_segment_nr = filetype_fhs[0].filetype_info['expected_segments'] + width_to_grid_type = _get_width_to_grid_type(filetype_fhs[0].get_segment_position_info()) + self.segment_infos.update({filetype: {'available_segment_infos': [], + 'expected_segments': exp_segment_nr, + 'width_to_grid_type': width_to_grid_type}}) + + def _get_empty_segment(self, dim=None, idx=None, filetype=None): + grid_type = self.segment_infos[filetype]['width_to_grid_type'][self.empty_segment.shape[1]] + segment_height = self.segment_heights[filetype][grid_type][idx] + return _get_empty_segment_with_height(self.empty_segment, segment_height, dim=dim) + + @cached_property + def segment_heights(self): + """Compute optimal padded segment heights (in number of pixels) based on the location of available segments.""" + segment_heights = dict() + for filetype, filetype_seginfos in self.segment_infos.items(): + filetype_seg_heights = {'1km': _compute_optimal_missing_segment_heights(filetype_seginfos, '1km', 11136), + '2km': _compute_optimal_missing_segment_heights(filetype_seginfos, '2km', 5568)} + segment_heights.update({filetype: filetype_seg_heights}) + return segment_heights + + def _get_new_areadef_heights(self, previous_area, previous_seg_size, segment_n=None, filetype=None): + # retrieve the segment height in number of pixels + grid_type = self.segment_infos[filetype]['width_to_grid_type'][previous_seg_size[1]] + new_height_px = self.segment_heights[filetype][grid_type][segment_n - 1] + # scale the previous vertical area extent using the new pixel height + prev_area_extent = previous_area.area_extent[1] - previous_area.area_extent[3] + new_height_proj_coord = prev_area_extent * new_height_px / previous_seg_size[0] + + return new_height_proj_coord, new_height_px + + +def _get_width_to_grid_type(seg_info): + width_to_grid_type = dict() + for grid_type, grid_type_seg_info in seg_info.items(): + width_to_grid_type.update({grid_type_seg_info['segment_width']: grid_type}) + return width_to_grid_type + + +def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vertical_size): + # initialise positioning arrays + 
segment_start_rows, segment_end_rows, segment_heights = _init_positioning_arrays_for_variable_padding( + seg_infos['available_segment_infos'], grid_type, seg_infos['expected_segments']) - return chunk_height + # populate start row of first segment and end row of last segment with known values + segment_start_rows[0] = 1 + segment_end_rows[seg_infos['expected_segments'] - 1] = expected_vertical_size + + # find missing segments and group contiguous missing segments together + missing_segments = np.where(segment_heights == 0)[0] + groups_missing_segments = np.split(missing_segments, np.where(np.diff(missing_segments) > 1)[0] + 1) + + for group in groups_missing_segments: + _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group) + + return segment_heights.astype('int') + + +def _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group): + _populate_group_start_end_row_using_neighbour_segments(group, segment_end_rows, segment_start_rows) + proposed_sizes_missing_segments = _compute_proposed_sizes_of_missing_segments_in_group(group, segment_end_rows, + segment_start_rows) + _populate_start_end_rows_of_missing_segments_with_proposed_sizes(group, proposed_sizes_missing_segments, + segment_start_rows, segment_end_rows, + segment_heights) + + +def _populate_start_end_rows_of_missing_segments_with_proposed_sizes(group, proposed_sizes_missing_segments, + segment_start_rows, segment_end_rows, + segment_heights): + for n in range(len(group)): + # start of first and end of last missing segment have been populated already + if n != 0: + segment_start_rows[group[n]] = segment_start_rows[group[n - 1]] + proposed_sizes_missing_segments[n] + 1 + if n != len(group) - 1: + segment_end_rows[group[n]] = segment_start_rows[group[n]] + proposed_sizes_missing_segments[n] + segment_heights[group[n]] = proposed_sizes_missing_segments[n] + + +def _compute_proposed_sizes_of_missing_segments_in_group(group, segment_end_rows, segment_start_rows): + size_group_gap = segment_end_rows[group[-1]] - segment_start_rows[group[0]] + 1 + proposed_sizes_missing_segments = split_integer_in_most_equal_parts(size_group_gap, len(group)) + return proposed_sizes_missing_segments + + +def _populate_group_start_end_row_using_neighbour_segments(group, segment_end_rows, segment_start_rows): + # if group is at the start/end of the full-disk, we know the start/end value already + if segment_start_rows[group[0]] == 0: + _populate_group_start_row_using_previous_segment(group, segment_end_rows, segment_start_rows) + if segment_end_rows[group[-1]] == 0: + _populate_group_end_row_using_later_segment(group, segment_end_rows, segment_start_rows) + + +def _populate_group_end_row_using_later_segment(group, segment_end_rows, segment_start_rows): + segment_end_rows[group[-1]] = segment_start_rows[group[-1] + 1] - 1 + + +def _populate_group_start_row_using_previous_segment(group, segment_end_rows, segment_start_rows): + segment_start_rows[group[0]] = segment_end_rows[group[0] - 1] + 1 + + +def _init_positioning_arrays_for_variable_padding(chk_infos, grid_type, exp_segment_nr): + segment_heights = np.zeros(exp_segment_nr) + segment_start_rows = np.zeros(exp_segment_nr) + segment_end_rows = np.zeros(exp_segment_nr) + + _populate_positioning_arrays_with_available_chunk_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, + segment_heights) + return segment_start_rows, segment_end_rows, segment_heights + + +def 
_populate_positioning_arrays_with_available_chunk_info(chk_infos, grid_type, segment_start_rows, segment_end_rows, + segment_heights): + for chk_info in chk_infos: + current_fh_segment_nr = chk_info['segment_nr'] + segment_heights[current_fh_segment_nr] = chk_info[grid_type]['segment_height'] + segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]['start_position_row'] + segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]['end_position_row'] + + +def split_integer_in_most_equal_parts(x, n): + """Split an integer number x into n parts that are as equally sized as possible.""" + if x % n == 0: + return np.repeat(x // n, n).astype('int') + else: + # split the remainder amount over the last remainder parts + remainder = int(x % n) + mod = int(x // n) + ar = np.repeat(mod, n) + ar[-remainder:] = mod + 1 + return ar.astype('int') diff --git a/satpy/resample.py b/satpy/resample.py index d9f19073ee..a8230ae8ed 100644 --- a/satpy/resample.py +++ b/satpy/resample.py @@ -193,7 +193,7 @@ def hash_dict(the_dict, the_hash=None): """Calculate a hash for a dictionary.""" if the_hash is None: - the_hash = hashlib.sha1() + the_hash = hashlib.sha1() # nosec the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8')) return the_hash @@ -950,6 +950,17 @@ def _mean(data, y_size, x_size): return data_mean +def _repeat_by_factor(data, block_info=None): + if block_info is None: + return data + out_shape = block_info[None]['chunk-shape'] + out_data = data + for axis, axis_size in enumerate(out_shape): + in_size = data.shape[axis] + out_data = np.repeat(out_data, int(axis_size / in_size), axis=axis) + return out_data + + class NativeResampler(BaseResampler): """Expand or reduce input datasets to be the same shape. @@ -973,7 +984,7 @@ def resample(self, data, cache_dir=None, mask_area=False, **kwargs): **kwargs) @staticmethod - def aggregate(d, y_size, x_size): + def _aggregate(d, y_size, x_size): """Average every 4 elements (2x2) in a 2D array.""" if d.ndim != 2: # we can't guarantee what blocks we are getting and how @@ -991,39 +1002,53 @@ def aggregate(d, y_size, x_size): new_chunks = (tuple(int(x / y_size) for x in d.chunks[0]), tuple(int(x / x_size) for x in d.chunks[1])) - return da.core.map_blocks(_mean, d, y_size, x_size, dtype=d.dtype, chunks=new_chunks) + return da.core.map_blocks(_mean, d, y_size, x_size, + meta=np.array((), dtype=d.dtype), + dtype=d.dtype, chunks=new_chunks) + + @staticmethod + def _replicate(d_arr, repeats): + """Repeat data pixels by the per-axis factors specified.""" + # rechunk so new chunks are the same size as old chunks + c_size = max(x[0] for x in d_arr.chunks) + + def _calc_chunks(c, c_size): + whole_chunks = [c_size] * int(sum(c) // c_size) + remaining = sum(c) - sum(whole_chunks) + if remaining: + whole_chunks += [remaining] + return tuple(whole_chunks) + new_chunks = [_calc_chunks(x, int(c_size // repeats[axis])) + for axis, x in enumerate(d_arr.chunks)] + d_arr = d_arr.rechunk(new_chunks) + + repeated_chunks = [] + for axis, axis_chunks in enumerate(d_arr.chunks): + factor = repeats[axis] + if not factor.is_integer(): + raise ValueError("Expand factor must be a whole number") + repeated_chunks.append(tuple(x * int(factor) for x in axis_chunks)) + repeated_chunks = tuple(repeated_chunks) + d_arr = d_arr.map_blocks(_repeat_by_factor, + meta=np.array((), dtype=d_arr.dtype), + dtype=d_arr.dtype, + chunks=repeated_chunks) + return d_arr @classmethod - def expand_reduce(cls, d_arr, repeats): + def _expand_reduce(cls, d_arr, repeats): """Expand or reduce the data by the per-axis factors in ``repeats``."""
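+ # ``repeats`` maps each axis index to a resampling factor: factors greater + # than 1 replicate pixels, factors below 1 aggregate them over the reciprocal + # window size, and mixed expand/reduce factors raise a ValueError below.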
if not isinstance(d_arr, da.Array): d_arr = da.from_array(d_arr, chunks=CHUNK_SIZE) if all(x == 1 for x in repeats.values()): return d_arr if all(x >= 1 for x in repeats.values()): - # rechunk so new chunks are the same size as old chunks - c_size = max(x[0] for x in d_arr.chunks) - - def _calc_chunks(c, c_size): - whole_chunks = [c_size] * int(sum(c) // c_size) - remaining = sum(c) - sum(whole_chunks) - if remaining: - whole_chunks += [remaining] - return tuple(whole_chunks) - new_chunks = [_calc_chunks(x, int(c_size // repeats[axis])) - for axis, x in enumerate(d_arr.chunks)] - d_arr = d_arr.rechunk(new_chunks) - - for axis, factor in repeats.items(): - if not factor.is_integer(): - raise ValueError("Expand factor must be a whole number") - d_arr = da.repeat(d_arr, int(factor), axis=axis) - return d_arr + return cls._replicate(d_arr, repeats) if all(x <= 1 for x in repeats.values()): # reduce y_size = 1. / repeats[0] x_size = 1. / repeats[1] - return cls.aggregate(d_arr, y_size, x_size) + return cls._aggregate(d_arr, y_size, x_size) raise ValueError("Must either expand or reduce in both " "directions") @@ -1056,7 +1081,7 @@ def compute(self, data, expand=True, **kwargs): repeats[y_axis] = y_repeats repeats[x_axis] = x_repeats - d_arr = self.expand_reduce(data.data, repeats) + d_arr = self._expand_reduce(data.data, repeats) new_data = xr.DataArray(d_arr, dims=data.dims) return update_resampled_coords(data, new_data, target_geo_def) diff --git a/satpy/scene.py b/satpy/scene.py index 293f7619b5..41bb77b94a 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2010-2017 Satpy developers +# Copyright (c) 2010-2022 Satpy developers # # This file is part of satpy. # @@ -21,6 +21,7 @@ import logging import os import warnings +from typing import Callable import numpy as np import xarray as xr @@ -34,6 +35,7 @@ from satpy.node import CompositorNode, MissingDependencies, ReaderNode from satpy.readers import load_readers from satpy.resample import get_area_def, prepare_resampler, resample_dataset +from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs from satpy.writers import load_writer LOG = logging.getLogger(__name__) @@ -71,14 +73,29 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None, reader_kwargs=None): """Initialize Scene with Reader and Compositor objects. - To load data `filenames` and preferably `reader` must be specified. If `filenames` is provided without `reader` - then the available readers will be searched for a Reader that can support the provided files. This can take - a considerable amount of time so it is recommended that `reader` always be provided. Note without `filenames` - the Scene is created with no Readers available requiring Datasets to be added manually:: + To load data, `filenames` and preferably `reader` must be specified:: + + scn = Scene(filenames=glob('/path/to/viirs/sdr/files/*'), reader='viirs_sdr') + + + If ``filenames`` is provided without ``reader`` then the available readers + will be searched for a Reader that can support the provided files. This + can take a considerable amount of time so it is recommended that + ``reader`` always be provided.
Note that without ``filenames`` the Scene is + created with no Readers available, requiring Datasets to be added + manually:: scn = Scene() scn['my_dataset'] = Dataset(my_data_array, **my_info) + Further, note that it is also possible to load a combination of files + or sets of files, each requiring its own specific reader. For that, + ``filenames`` needs to be a `dict` (see the parameters list below), e.g.:: + + scn = Scene(filenames={'nwcsaf-pps_nc': glob('/path/to/nwc/saf/pps/files/*'), + 'modis_l1b': glob('/path/to/modis/lvl1/files/*')}) + + Args: filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object should map reader names to a list of filenames for that reader. @@ -91,21 +108,31 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None, sub-dictionaries to pass different arguments to different reader instances. + Keyword arguments for remote file access are also given in this dictionary. + See `documentation `_ + for usage examples. + """ self.attrs = dict() + + storage_options, cleaned_reader_kwargs = get_storage_options_from_reader_kwargs(reader_kwargs) + if filter_parameters: - if reader_kwargs is None: - reader_kwargs = {} + if cleaned_reader_kwargs is None: + cleaned_reader_kwargs = {} else: - reader_kwargs = reader_kwargs.copy() - reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters) + cleaned_reader_kwargs = cleaned_reader_kwargs.copy() + cleaned_reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters) if filenames and isinstance(filenames, str): raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])") + if filenames: + filenames = convert_remote_files_to_fsspec(filenames, storage_options) + self._readers = self._create_reader_instances(filenames=filenames, reader=reader, - reader_kwargs=reader_kwargs) + reader_kwargs=cleaned_reader_kwargs) self._datasets = DatasetDict() self._wishlist = set() self._dependency_tree = DependencyTree(self._readers) @@ -135,16 +162,15 @@ def sensor_names(self) -> set[str]: Sensor information is collected from data contained in the Scene whether loaded from a reader or generated as a composite with :meth:`load` or added manually using ``scn["name"] = data_arr``). - If no data is currently contained in the Scene or no sensor - information is found in the data metadata, loaded readers will be - consulted for sensor information. + Sensor information is also collected from any loaded readers. + In some rare cases this may mean that the reader includes sensor + information for data that isn't actually loaded or even available. """ - sensor_names = self._contained_sensor_names() - if not sensor_names: - sensor_names = set([sensor for reader_instance in self._readers.values() - for sensor in reader_instance.sensor_names]) - return sensor_names + contained_sensor_names = self._contained_sensor_names() + reader_sensor_names = set([sensor for reader_instance in self._readers.values() + for sensor in reader_instance.sensor_names]) + return contained_sensor_names | reader_sensor_names def _contained_sensor_names(self) -> set[str]: sensor_names = set() @@ -224,15 +250,33 @@ def _compare_areas(self, datasets=None, compare_func=max): if not all(ad.crs == first_crs for ad in areas[1:]): raise ValueError("Can't compare areas with different " "projections.") + return self._compare_area_defs(compare_func, areas) + return self._compare_swath_defs(compare_func, areas) - def key_func(ds): - return 1.
/ abs(ds.pixel_size_x) - else: - def key_func(ds): - return ds.shape + @staticmethod + def _compare_area_defs(compare_func: Callable, area_defs: list[AreaDefinition]) -> list[AreaDefinition]: + def _key_func(area_def: AreaDefinition) -> tuple: + """Get comparable version of area based on resolution. + + Pixel size x is the primary comparison parameter followed by + the y dimension pixel size. The extent of the area and the + name (area_id) of the area are also used to act as + "tiebreakers" between areas of the same resolution. + + """ + pixel_size_x_inverse = 1. / abs(area_def.pixel_size_x) + pixel_size_y_inverse = 1. / abs(area_def.pixel_size_y) + area_id = area_def.area_id + return pixel_size_x_inverse, pixel_size_y_inverse, area_def.area_extent, area_id + return compare_func(area_defs, key=_key_func) - # find the highest/lowest area among the provided - return compare_func(areas, key=key_func) + @staticmethod + def _compare_swath_defs(compare_func: Callable, swath_defs: list[SwathDefinition]) -> list[SwathDefinition]: + def _key_func(swath_def: SwathDefinition) -> tuple: + attrs = getattr(swath_def.lons, "attrs", {}) + lon_ds_name = attrs.get("name") + return swath_def.shape[1], swath_def.shape[0], lon_ds_name + return compare_func(swath_defs, key=_key_func) def _gather_all_areas(self, datasets): """Gather all areas from datasets. @@ -909,7 +953,8 @@ def resample(self, destination=None, datasets=None, generate=True, # regenerate anything from the wishlist that needs it (combining # multiple resolutions, etc.) - new_scn.generate_possible_composites(generate, unload) + if generate: + new_scn.generate_possible_composites(unload) return new_scn @@ -1143,6 +1188,47 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True, **kwargs) return writer.save_datasets(dataarrays, compute=compute, **save_kwargs) + def compute(self, **kwargs): + """Call `compute` on all Scene data arrays. + + See :meth:`xarray.DataArray.compute` for more details. + Note that this will convert the contents of the DataArray to numpy arrays which + may not work with all parts of Satpy which may expect dask arrays. + """ + from dask import compute + new_scn = self.copy() + datasets = compute(*(new_scn._datasets.values()), **kwargs) + + for i, k in enumerate(new_scn._datasets.keys()): + new_scn[k] = datasets[i] + + return new_scn + + def persist(self, **kwargs): + """Call `persist` on all Scene data arrays. + + See :meth:`xarray.DataArray.persist` for more details. + """ + from dask import persist + new_scn = self.copy() + datasets = persist(*(new_scn._datasets.values()), **kwargs) + + for i, k in enumerate(new_scn._datasets.keys()): + new_scn[k] = datasets[i] + + return new_scn + + def chunk(self, **kwargs): + """Call `chunk` on all Scene data arrays. + + See :meth:`xarray.DataArray.chunk` for more details. + """ + new_scn = self.copy() + for k in new_scn._datasets.keys(): + new_scn[k] = new_scn[k].chunk(**kwargs) + + return new_scn + @staticmethod def _get_writer_by_ext(extension): """Find the writer matching the ``extension``. 
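A minimal usage sketch for the new ``Scene.compute``, ``Scene.persist`` and ``Scene.chunk`` helpers added above (the reader name, channel and file paths are illustrative only); each helper returns a new Scene instead of modifying the original in place::

    from glob import glob
    from satpy import Scene

    scn = Scene(filenames=glob('/path/to/viirs/sdr/files/*'), reader='viirs_sdr')
    scn.load(['I04'])
    rechunked = scn.chunk(chunks={'x': 512, 'y': 512})  # re-chunk every contained dask array
    persisted = rechunked.persist()  # compute the arrays but keep them as dask collections
    in_memory = persisted.compute()  # contents become numpy-backed DataArrays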
@@ -1258,7 +1344,8 @@ def load(self, wishlist, calibration='*', resolution='*', self._wishlist |= needed_datasets self._read_datasets_from_storage(**kwargs) - self.generate_possible_composites(generate, unload) + if generate: + self.generate_possible_composites(unload) def _update_dependency_tree(self, needed_datasets, query): try: @@ -1314,13 +1401,15 @@ def _load_datasets_by_readers(self, reader_datasets, **kwargs): loaded_datasets.update(new_datasets) return loaded_datasets - def generate_possible_composites(self, generate, unload): - """See what we can generate and do it.""" - if generate: - keepables = self._generate_composites_from_loaded_datasets() - else: - # don't lose datasets we loaded to try to generate composites - keepables = set(self._datasets.keys()) | self._wishlist + def generate_possible_composites(self, unload): + """See which composites can be generated and generate them. + + Args: + unload (bool): if the dependencies of the composites + should be unloaded after successful generation. + """ + keepables = self._generate_composites_from_loaded_datasets() + if self.missing_datasets: self._remove_failed_datasets(keepables) if unload: diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py index 7c05c58423..54000ccb70 100644 --- a/satpy/tests/compositor_tests/test_viirs.py +++ b/satpy/tests/compositor_tests/test_viirs.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2018 Satpy developers +# Copyright (c) 2018, 2022 Satpy developers # # This file is part of satpy. # @@ -17,25 +17,19 @@ # satpy. If not, see . """Tests for VIIRS compositors.""" -import unittest +import dask.array as da +import numpy as np +import pytest +import xarray as xr +from pyresample.geometry import AreaDefinition -class TestVIIRSComposites(unittest.TestCase): - """Test VIIRS-specific composites.""" +class TestVIIRSComposites: + """Test various VIIRS-specific composites.""" - def test_load_composite_yaml(self): - """Test loading the yaml for this sensor.""" - from satpy.composites.config_loader import load_compositor_configs_for_sensors - load_compositor_configs_for_sensors(['viirs']) - - def test_histogram_dnb(self): - """Test the 'histogram_dnb' compositor.""" - import dask.array as da - import numpy as np - import xarray as xr - from pyresample.geometry import AreaDefinition - - from satpy.composites.viirs import HistogramDNB + @pytest.fixture + def area(self): + """Return fake area for use with DNB tests.""" rows = 5 cols = 10 area = AreaDefinition( @@ -44,186 +38,166 @@ def test_histogram_dnb(self): 'lat_0': 0.0}, cols, rows, (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) + return area - comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') - dnb = np.zeros((rows, cols)) + 0.25 + @pytest.fixture + def c01(self, area): + """Return fake channel 1 data for DNB tests.""" + dnb = np.zeros(area.shape) + 0.25 dnb[3, :] += 0.25 dnb[4:, :] += 0.5 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={'name': 'DNB', 'area': area}) + return c01 + + @pytest.fixture + def c02(self, area): + """Return fake sza dataset for DNB tests.""" # data changes by row, sza changes by col for testing - sza = np.zeros((rows, cols)) + 70.0 + sza = np.zeros(area.shape) + 70.0 sza[:, 3] += 20.0 sza[:, 4:] += 45.0 sza = da.from_array(sza, chunks=25) c02 = xr.DataArray(sza, dims=('y', 'x'), attrs={'name': 'solar_zenith_angle', 'area': area}) + 
return c02 + + @pytest.fixture + def c03(self, area): + """Return fake lunar zenith angle dataset for DNB tests.""" + lza = np.zeros(area.shape) + 70.0 + lza[:, 3] += 20.0 + lza[:, 4:] += 45.0 + lza = da.from_array(lza, chunks=25) + c03 = xr.DataArray(lza, + dims=('y', 'x'), + attrs={'name': 'lunar_zenith_angle', 'area': area}) + return c03 + + def test_load_composite_yaml(self): + """Test loading the yaml for this sensor.""" + from satpy.composites.config_loader import load_compositor_configs_for_sensors + load_compositor_configs_for_sensors(['viirs']) + + def test_histogram_dnb(self, c01, c02): + """Test the 'histogram_dnb' compositor.""" + from satpy.composites.viirs import HistogramDNB + + comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',), + standard_name='toa_outgoing_radiance_per_' + 'unit_wavelength') res = comp((c01, c02)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'histogram_dnb') - self.assertEqual(res.attrs['standard_name'], - 'equalized_radiance') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs['name'] == 'histogram_dnb' + assert res.attrs['standard_name'] == 'equalized_radiance' data = res.compute() unique_values = np.unique(data) np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3) - def test_adaptive_dnb(self): + def test_adaptive_dnb(self, c01, c02): """Test the 'adaptive_dnb' compositor.""" - import dask.array as da - import numpy as np - import xarray as xr - from pyresample.geometry import AreaDefinition - from satpy.composites.viirs import AdaptiveDNB - rows = 5 - cols = 10 - area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, - cols, rows, - (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',), standard_name='toa_outgoing_radiance_per_' 'unit_wavelength') - dnb = np.zeros((rows, cols)) + 0.25 - dnb[3, :] += 0.25 - dnb[4:, :] += 0.5 - dnb = da.from_array(dnb, chunks=25) - c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area}) - sza = np.zeros((rows, cols)) + 70.0 - sza[:, 3] += 20.0 - sza[:, 4:] += 45.0 - sza = da.from_array(sza, chunks=25) - c02 = xr.DataArray(sza, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', 'area': area}) res = comp((c01, c02)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'adaptive_dnb') - self.assertEqual(res.attrs['standard_name'], - 'equalized_radiance') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs['name'] == 'adaptive_dnb' + assert res.attrs['standard_name'] == 'equalized_radiance' data = res.compute() np.testing.assert_allclose(data.data, 0.999, rtol=1e-4) - def test_erf_dnb(self): - """Test the 'dynamic_dnb' or ERF DNB compositor.""" - import dask.array as da - import numpy as np - import xarray as xr - from pyresample.geometry import AreaDefinition - - from satpy.composites.viirs import ERFDNB - rows = 5 - cols = 10 - area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, - cols, rows, - (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) + def test_hncc_dnb(self, area, c01, c02, c03): + """Test the 'hncc_dnb' compositor.""" + from satpy.composites.viirs import NCCZinke - comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), -
standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') - dnb = np.zeros((rows, cols)) + 0.25 - dnb[3, :] += 0.25 - dnb[4:, :] += 0.5 - dnb = da.from_array(dnb, chunks=25) - c01 = xr.DataArray(dnb, - dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area}) - sza = np.zeros((rows, cols)) + 70.0 - sza[:, 3] += 20.0 - sza[:, 4:] += 45.0 - sza = da.from_array(sza, chunks=25) - c02 = xr.DataArray(sza, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', 'area': area}) - lza = np.zeros((rows, cols)) + 70.0 - lza[:, 3] += 20.0 - lza[:, 4:] += 45.0 - lza = da.from_array(lza, chunks=25) - c03 = xr.DataArray(lza, - dims=('y', 'x'), - attrs={'name': 'lunar_zenith_angle', 'area': area}) + comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), + standard_name='toa_outgoing_radiance_per_' + 'unit_wavelength') mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=('y',), attrs={'name': 'moon_illumination_fraction', 'area': area}) res = comp((c01, c02, c03, mif)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'dynamic_dnb') - self.assertEqual(res.attrs['standard_name'], - 'equalized_radiance') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs['name'] == 'hncc_dnb' + assert res.attrs['standard_name'] == 'ncc_radiance' data = res.compute() unique = np.unique(data) - np.testing.assert_allclose(unique, [0.00000000e+00, 1.00446703e-01, 1.64116082e-01, 2.09233451e-01, - 1.43916324e+02, 2.03528498e+02, 2.49270516e+02]) - - def test_hncc_dnb(self): - """Test the 'hncc_dnb' compositor.""" - import dask.array as da - import numpy as np - import xarray as xr - from pyresample.geometry import AreaDefinition + np.testing.assert_allclose( + unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03, + 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, + 4.50001560e+03]) - from satpy.composites.viirs import NCCZinke - rows = 5 - cols = 10 - area = AreaDefinition( - 'test', 'test', 'test', - {'proj': 'eqc', 'lon_0': 0.0, - 'lat_0': 0.0}, - cols, rows, - (-20037508.34, -10018754.17, 20037508.34, 10018754.17)) + @pytest.mark.parametrize("dnb_units", ["W m-2 sr-1", "W cm-2 sr-1"]) + @pytest.mark.parametrize("saturation_correction", [False, True]) + def test_erf_dnb(self, dnb_units, saturation_correction, area, c02, c03): + """Test the 'dynamic_dnb' or ERF DNB compositor.""" + from satpy.composites.viirs import ERFDNB - comp = NCCZinke('hncc_dnb', prerequisites=('dnb',), - standard_name='toa_outgoing_radiance_per_' - 'unit_wavelength') - dnb = np.zeros((rows, cols)) + 0.25 + comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',), + saturation_correction=saturation_correction, + standard_name='toa_outgoing_radiance_per_' + 'unit_wavelength') + # c01 is different here than in the other tests, so don't use the fixture + # here + dnb = np.zeros(area.shape) + 0.25 + cols = area.shape[1] + dnb[2, :cols // 2] = np.nan dnb[3, :] += 0.25 dnb[4:, :] += 0.5 + if dnb_units == "W cm-2 sr-1": + dnb /= 10000.0 dnb = da.from_array(dnb, chunks=25) c01 = xr.DataArray(dnb, dims=('y', 'x'), - attrs={'name': 'DNB', 'area': area}) - sza = np.zeros((rows, cols)) + 70.0 - sza[:, 3] += 20.0 - sza[:, 4:] += 45.0 - sza = da.from_array(sza, chunks=25) - c02 = xr.DataArray(sza, - dims=('y', 'x'), - attrs={'name': 'solar_zenith_angle', 'area': area}) - lza = np.zeros((rows, cols)) + 70.0 - lza[:, 3] += 20.0 - lza[:, 4:] += 45.0 - lza = da.from_array(lza, chunks=25) - c03 = xr.DataArray(lza, -
dims=('y', 'x'), - attrs={'name': 'lunar_zenith_angle', 'area': area}) + attrs={'name': 'DNB', 'area': area, 'units': dnb_units}) mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1, dims=('y',), attrs={'name': 'moon_illumination_fraction', 'area': area}) res = comp((c01, c02, c03, mif)) - self.assertIsInstance(res, xr.DataArray) - self.assertIsInstance(res.data, da.Array) - self.assertEqual(res.attrs['name'], 'hncc_dnb') - self.assertEqual(res.attrs['standard_name'], - 'ncc_radiance') + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs['name'] == 'dynamic_dnb' + assert res.attrs['standard_name'] == 'equalized_radiance' data = res.compute() unique = np.unique(data) - np.testing.assert_allclose( - unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03, - 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03, - 4.50001560e+03]) + assert np.isnan(unique).any() + nonnan_unique = unique[~np.isnan(unique)] + if saturation_correction: + exp_unique = [0.000000e+00, 3.978305e-04, 6.500003e-04, + 8.286927e-04, 5.628335e-01, 7.959671e-01, + 9.748567e-01] + else: + exp_unique = [0.00000000e+00, 1.00446703e-01, 1.64116082e-01, + 2.09233451e-01, 1.43916324e+02, 2.03528498e+02, + 2.49270516e+02] + np.testing.assert_allclose(nonnan_unique, exp_unique) + + def test_snow_age(self, area): + """Test the 'snow_age' compositor.""" + from satpy.composites.viirs import SnowAge + + projectables = tuple( + xr.DataArray( + da.from_array(np.full(area.shape, 5.*i), chunks=5), + dims=("y", "x"), + attrs={"name": f"M0{i:d}", + "calibration": "reflectance", + "units": "%"}) + for i in range(7, 12)) + comp = SnowAge( + "snow_age", + prerequisites=("M07", "M08", "M09", "M10", "M11",), + standard_name="snow_age") + res = comp(projectables) + assert isinstance(res, xr.DataArray) + assert isinstance(res.data, da.Array) + assert res.attrs["name"] == "snow_age" + assert "units" not in res.attrs diff --git a/satpy/tests/enhancement_tests/test_ahi.py b/satpy/tests/enhancement_tests/test_ahi.py new file mode 100644 index 0000000000..118a00efe6 --- /dev/null +++ b/satpy/tests/enhancement_tests/test_ahi.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2021 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Unit testing for the AHI enhancement function.""" + +import dask.array as da +import numpy as np +import xarray as xr + + +class TestAHIEnhancement(): + """Test the AHI enhancement functions.""" + + def setup(self): + """Create test data.""" + data = da.arange(-100, 1000, 110).reshape(2, 5) + rgb_data = np.stack([data, data, data]) + self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), + coords={'bands': ['R', 'G', 'B']}) + + def test_jma_true_color_reproduction(self): + """Test the jma_true_color_reproduction enhancement.""" + from trollimage.xrimage import XRImage + + from satpy.enhancements.ahi import jma_true_color_reproduction + + expected = [[[-109.98, 10.998, 131.976, 252.954, 373.932], + [494.91, 615.888, 736.866, 857.844, 978.822]], + + [[-97.6, 9.76, 117.12, 224.48, 331.84], + [439.2, 546.56, 653.92, 761.28, 868.64]], + + [[-94.27, 9.427, 113.124, 216.821, 320.518], + [424.215, 527.912, 631.609, 735.306, 839.003]]] + + img = XRImage(self.rgb) + jma_true_color_reproduction(img) + np.testing.assert_almost_equal(img.data.compute(), expected) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 4ab0136d07..656bdab3df 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -17,19 +17,40 @@ # satpy. If not, see . """Unit testing the enhancements functions, e.g. cira_stretch.""" +import contextlib import os -import unittest +from tempfile import NamedTemporaryFile from unittest import mock import dask.array as da import numpy as np +import pytest import xarray as xr +from satpy.enhancements import create_colormap -class TestEnhancementStretch(unittest.TestCase): + +def run_and_check_enhancement(func, data, expected, **kwargs): + """Perform basic checks that apply to multiple tests.""" + from trollimage.xrimage import XRImage + + pre_attrs = data.attrs + img = XRImage(data) + func(img, **kwargs) + + assert isinstance(img.data.data, da.Array) + old_keys = set(pre_attrs.keys()) + # It is OK to have "enhancement_history" added + new_keys = set(img.data.attrs.keys()) - {"enhancement_history"} + assert old_keys == new_keys + + np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0) + + +class TestEnhancementStretch: """Class for testing enhancements in satpy.enhancements.""" - def setUp(self): + def setup_method(self): """Create test data used by every test.""" data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95 data[0, 0] = np.nan # one bad value for testing @@ -43,21 +64,6 @@ def setUp(self): self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'), coords={'bands': ['R', 'G', 'B']}) - def _test_enhancement(self, func, data, expected, **kwargs): - """Perform basic checks that apply to multiple tests.""" - from trollimage.xrimage import XRImage - - pre_attrs = data.attrs - img = XRImage(data) - func(img, **kwargs) - - self.assertIsInstance(img.data.data, da.Array) - self.assertListEqual(sorted(pre_attrs.keys()), - sorted(img.data.attrs.keys()), - "DataArray attributes were not preserved") - - np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0) - def test_cira_stretch(self): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch @@ -65,7 +71,7 @@ def test_cira_stretch(self): expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]]) - self._test_enhancement(cira_stretch, self.ch1, 
expected) + run_and_check_enhancement(cira_stretch, self.ch1, expected) def test_reinhard(self): """Test the reinhard algorithm.""" @@ -78,7 +84,7 @@ def test_reinhard(self): [[np.nan, 0., 0., 0.93333793, 1.29432402], [1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]]]) - self._test_enhancement(reinhard_to_srgb, self.rgb, expected) + run_and_check_enhancement(reinhard_to_srgb, self.rgb, expected) def test_lookup(self): """Test the lookup enhancement function.""" @@ -87,7 +93,7 @@ def test_lookup(self): [0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) - self._test_enhancement(lookup, self.ch1, expected, luts=lut) + run_and_check_enhancement(lookup, self.ch1, expected, luts=lut) expected = np.array([[[0., 0., 0., 0.333333, 0.705882], [1., 1., 1., 1., 1.]], @@ -97,7 +103,7 @@ def test_lookup(self): [1., 1., 1., 1., 1.]]]) lut = np.arange(256.) lut = np.vstack((lut, lut, lut)).T - self._test_enhancement(lookup, self.rgb, expected, luts=lut) + run_and_check_enhancement(lookup, self.rgb, expected, luts=lut) def test_colorize(self): """Test the colorize enhancement function.""" @@ -117,7 +123,7 @@ def test_colorize(self): 1.88238767e-01, 1.88238767e-01], [1.88238767e-01, 1.88238767e-01, 1.88238767e-01, 1.88238767e-01, 1.88238767e-01]]]) - self._test_enhancement(colorize, self.ch1, expected, palettes=brbg) + run_and_check_enhancement(colorize, self.ch1, expected, palettes=brbg) def test_palettize(self): """Test the palettize enhancement function.""" @@ -125,7 +131,7 @@ def test_palettize(self): from satpy.enhancements import palettize expected = np.array([[[10, 0, 0, 10, 10], [10, 10, 10, 10, 10]]]) - self._test_enhancement(palettize, self.ch1, expected, palettes=brbg) + run_and_check_enhancement(palettize, self.ch1, expected, palettes=brbg) def test_three_d_effect(self): """Test the three_d_effect enhancement function.""" @@ -133,7 +139,7 @@ def test_three_d_effect(self): expected = np.array([[ [np.nan, np.nan, -389.5, -294.5, 826.5], [np.nan, np.nan, 85.5, 180.5, 1301.5]]]) - self._test_enhancement(three_d_effect, self.ch1, expected) + run_and_check_enhancement(three_d_effect, self.ch1, expected) def test_crefl_scaling(self): """Test the crefl_scaling enhancement function.""" @@ -141,8 +147,8 @@ def test_crefl_scaling(self): expected = np.array([[ [np.nan, 0., 0., 0.44378, 0.631734], [0.737562, 0.825041, 0.912521, 1., 1.]]]) - self._test_enhancement(crefl_scaling, self.ch2, expected, idx=[0., 25., 55., 100., 255.], - sc=[0., 90., 140., 175., 255.]) + run_and_check_enhancement(crefl_scaling, self.ch2, expected, idx=[0., 25., 55., 100., 255.], + sc=[0., 90., 140., 175., 255.]) def test_piecewise_linear_stretch(self): """Test the piecewise_linear_stretch enhancement function.""" @@ -150,13 +156,13 @@ def test_piecewise_linear_stretch(self): expected = np.array([[ [np.nan, 0., 0., 0.44378, 0.631734], [0.737562, 0.825041, 0.912521, 1., 1.]]]) - self._test_enhancement(piecewise_linear_stretch, - self.ch2 / 100.0, - expected, - xp=[0., 25., 55., 100., 255.], - fp=[0., 90., 140., 175., 255.], - reference_scale_factor=255, - ) + run_and_check_enhancement(piecewise_linear_stretch, + self.ch2 / 100.0, + expected, + xp=[0., 25., 55., 100., 255.], + fp=[0., 90., 140., 175., 255.], + reference_scale_factor=255, + ) def test_btemp_threshold(self): """Test applying the cira_stretch.""" @@ -165,8 +171,8 @@ def test_btemp_threshold(self): expected = np.array([[ [np.nan, 0.946207, 0.892695, 0.839184, 0.785672], [0.73216, 0.595869, 0.158745, -0.278379, -0.715503]]]) - 
self._test_enhancement(btemp_threshold, self.ch1, expected, - min_in=-200, max_in=500, threshold=350) + run_and_check_enhancement(btemp_threshold, self.ch1, expected, + min_in=-200, max_in=500, threshold=350) def test_merge_colormaps(self): """Test merging colormaps.""" @@ -182,7 +188,7 @@ def test_merge_colormaps(self): with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock): res = mcp(kwargs) - self.assertTrue(res is cmap1) + assert res is cmap1 create_colormap_mock.assert_not_called() create_colormap_mock.reset_mock() ret_map.reset_mock() @@ -218,171 +224,132 @@ def tearDown(self): """Clean up.""" -class TestColormapLoading(unittest.TestCase): +@contextlib.contextmanager +def closed_named_temp_file(**kwargs): + """Named temporary file context manager that closes the file after creation. + + This helps with Windows systems which can get upset with opening or + deleting a file that is already open. + + """ + try: + with NamedTemporaryFile(delete=False, **kwargs) as tmp_cmap: + yield tmp_cmap.name + finally: + os.remove(tmp_cmap.name) + + +def _write_cmap_to_file(cmap_filename, cmap_data): + ext = os.path.splitext(cmap_filename)[1] + if ext in (".npy",): + np.save(cmap_filename, cmap_data) + elif ext in (".npz",): + np.savez(cmap_filename, cmap_data) + else: + np.savetxt(cmap_filename, cmap_data, delimiter=",") + + +def _generate_cmap_test_data(color_scale, colormap_mode): + cmap_data = np.array([ + [1, 0, 0], + [1, 1, 0], + [1, 1, 1], + [0, 0, 1], + ], dtype=np.float64) + if len(colormap_mode) != 3: + _cmap_data = cmap_data + cmap_data = np.empty((cmap_data.shape[0], len(colormap_mode)), + dtype=np.float64) + if colormap_mode.startswith("V") or colormap_mode.endswith("A"): + cmap_data[:, 0] = np.array([128, 130, 132, 134]) / 255.0 + cmap_data[:, -3:] = _cmap_data + if colormap_mode.startswith("V") and colormap_mode.endswith("A"): + cmap_data[:, 1] = np.array([128, 130, 132, 134]) / 255.0 + if color_scale is None or color_scale == 255: + cmap_data = (cmap_data * 255).astype(np.uint8) + return cmap_data + + +class TestColormapLoading: """Test utilities used with colormaps.""" - def test_cmap_from_file_rgb(self): + @pytest.mark.parametrize("color_scale", [None, 1.0]) + @pytest.mark.parametrize("colormap_mode", ["RGB", "VRGB", "VRGBA"]) + @pytest.mark.parametrize("extra_kwargs", + [ + {}, + {"min_value": 50, "max_value": 100}, + ]) + @pytest.mark.parametrize("filename_suffix", [".npy", ".npz", ".csv"]) + def test_cmap_from_file(self, color_scale, colormap_mode, extra_kwargs, filename_suffix): """Test that colormaps can be loaded from a binary file.""" - from tempfile import NamedTemporaryFile - - from satpy.enhancements import create_colormap - # create the colormap file on disk - with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: - cmap_filename = tmp_cmap.name - np.save(cmap_filename, np.array([ - [255, 0, 0], - [255, 255, 0], - [255, 255, 255], - [0, 0, 255], - ])) - - try: - cmap = create_colormap({'filename': cmap_filename}) - self.assertEqual(cmap.colors.shape[0], 4) - np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 0) - self.assertEqual(cmap.values[-1], 1.0) - - cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100}) - self.assertEqual(cmap.colors.shape[0], 4) - np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 50) - self.assertEqual(cmap.values[-1], 100) - 
finally: - os.remove(cmap_filename) - - def test_cmap_from_file_rgb_1(self): - """Test that colormaps can be loaded from a binary file with 0-1 colors.""" - from tempfile import NamedTemporaryFile - - from satpy.enhancements import create_colormap - - # create the colormap file on disk - with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: - cmap_filename = tmp_cmap.name - np.save(cmap_filename, np.array([ - [1, 0, 0], - [1, 1, 0], - [1, 1, 1], - [0, 0, 1], - ])) - - try: - cmap = create_colormap({'filename': cmap_filename, - 'color_scale': 1}) - self.assertEqual(cmap.colors.shape[0], 4) - np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 0) - self.assertEqual(cmap.values[-1], 1.0) - - cmap = create_colormap({'filename': cmap_filename, 'color_scale': 1, - 'min_value': 50, 'max_value': 100}) - self.assertEqual(cmap.colors.shape[0], 4) - np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 50) - self.assertEqual(cmap.values[-1], 100) - finally: - os.remove(cmap_filename) - - def test_cmap_from_file_vrgb(self): - """Test that colormaps can be loaded from a binary file with values.""" - from tempfile import NamedTemporaryFile - - from satpy.enhancements import create_colormap - - # create the colormap file on disk - with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: - cmap_filename = tmp_cmap.name - np.save(cmap_filename, np.array([ - [128, 255, 0, 0], - [130, 255, 255, 0], - [132, 255, 255, 255], - [134, 0, 0, 255], - ])) - - try: - # default mode of VRGB - cmap = create_colormap({'filename': cmap_filename}) - self.assertEqual(cmap.colors.shape[0], 4) - np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 128) - self.assertEqual(cmap.values[-1], 134) - - cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': 'RGBA'}) - self.assertEqual(cmap.colors.shape[0], 4) - self.assertEqual(cmap.colors.shape[1], 4) # RGBA + with closed_named_temp_file(suffix=filename_suffix) as cmap_filename: + cmap_data = _generate_cmap_test_data(color_scale, colormap_mode) + _write_cmap_to_file(cmap_filename, cmap_data) + + unset_first_value = 128.0 / 255.0 if colormap_mode.startswith("V") else 0.0 + unset_last_value = 134.0 / 255.0 if colormap_mode.startswith("V") else 1.0 + if (color_scale is None or color_scale == 255) and colormap_mode.startswith("V"): + unset_first_value *= 255 + unset_last_value *= 255 + if "min_value" in extra_kwargs: + unset_first_value = extra_kwargs["min_value"] + unset_last_value = extra_kwargs["max_value"] + + first_color = [1.0, 0.0, 0.0] + if colormap_mode == "VRGBA": + first_color = [128.0 / 255.0] + first_color + + kwargs1 = {"filename": cmap_filename} + kwargs1.update(extra_kwargs) + if color_scale is not None: + kwargs1["color_scale"] = color_scale + + cmap = create_colormap(kwargs1) + assert cmap.colors.shape[0] == 4 + np.testing.assert_equal(cmap.colors[0], first_color) + assert cmap.values.shape[0] == 4 + assert cmap.values[0] == unset_first_value + assert cmap.values[-1] == unset_last_value + + def test_cmap_vrgb_as_rgba(self): + """Test that data created as VRGB still reads as RGBA.""" + with closed_named_temp_file(suffix=".npy") as cmap_filename: + cmap_data = _generate_cmap_test_data(None, "VRGB") + np.save(cmap_filename, cmap_data) + cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': 
"RGBA"}) + assert cmap.colors.shape[0] == 4 + assert cmap.colors.shape[1] == 4 # RGBA np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 0) - self.assertEqual(cmap.values[-1], 1.0) - - cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100}) - self.assertEqual(cmap.colors.shape[0], 4) - np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 50) - self.assertEqual(cmap.values[-1], 100) - - self.assertRaises(ValueError, create_colormap, - {'filename': cmap_filename, 'colormap_mode': 'RGB', - 'min_value': 50, 'max_value': 100}) - finally: - os.remove(cmap_filename) - - def test_cmap_from_file_vrgba(self): - """Test that colormaps can be loaded RGBA colors and values.""" - from tempfile import NamedTemporaryFile - - from satpy.enhancements import create_colormap - - # create the colormap file on disk - with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: - cmap_filename = tmp_cmap.name - np.save(cmap_filename, np.array([ - [128, 128, 255, 0, 0], # value, R, G, B, A - [130, 130, 255, 255, 0], - [132, 132, 255, 255, 255], - [134, 134, 0, 0, 255], - ])) - - try: - # default mode of VRGBA - cmap = create_colormap({'filename': cmap_filename}) - self.assertEqual(cmap.colors.shape[0], 4) - self.assertEqual(cmap.colors.shape[1], 4) # RGBA - np.testing.assert_equal(cmap.colors[0], [128 / 255.0, 1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 128) - self.assertEqual(cmap.values[-1], 134) - - self.assertRaises(ValueError, create_colormap, - {'filename': cmap_filename, 'colormap_mode': 'RGBA'}) - - cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100}) - self.assertEqual(cmap.colors.shape[0], 4) - self.assertEqual(cmap.colors.shape[1], 4) # RGBA - np.testing.assert_equal(cmap.colors[0], [128 / 255.0, 1.0, 0, 0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 50) - self.assertEqual(cmap.values[-1], 100) - finally: - os.remove(cmap_filename) + assert cmap.values.shape[0] == 4 + assert cmap.values[0] == 0 + assert cmap.values[-1] == 1.0 + + @pytest.mark.parametrize( + ("real_mode", "forced_mode"), + [ + ("VRGBA", "RGBA"), + ("VRGBA", "VRGB"), + ("RGBA", "RGB"), + ] + ) + @pytest.mark.parametrize("filename_suffix", [".npy", ".csv"]) + def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix): + """Test that reading colormaps with the wrong mode fails.""" + with closed_named_temp_file(suffix=filename_suffix) as cmap_filename: + cmap_data = _generate_cmap_test_data(None, real_mode) + _write_cmap_to_file(cmap_filename, cmap_data) + # Force colormap_mode VRGBA to RGBA and we should see an exception + with pytest.raises(ValueError): + create_colormap({'filename': cmap_filename, 'colormap_mode': forced_mode}) def test_cmap_from_file_bad_shape(self): """Test that unknown array shape causes an error.""" - from tempfile import NamedTemporaryFile - from satpy.enhancements import create_colormap # create the colormap file on disk - with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap: - cmap_filename = tmp_cmap.name + with closed_named_temp_file(suffix='.npy') as cmap_filename: np.save(cmap_filename, np.array([ [0], [64], @@ -390,11 +357,28 @@ def test_cmap_from_file_bad_shape(self): [255], ])) - try: - self.assertRaises(ValueError, create_colormap, - {'filename': cmap_filename}) - 
finally: - os.remove(cmap_filename) + with pytest.raises(ValueError): + create_colormap({'filename': cmap_filename}) + + def test_cmap_from_config_path(self, tmp_path): + """Test loading a colormap relative to a config path.""" + import satpy + from satpy.enhancements import create_colormap + + cmap_dir = tmp_path / "colormaps" + cmap_dir.mkdir() + cmap_filename = cmap_dir / "my_colormap.npy" + cmap_data = _generate_cmap_test_data(None, "RGBA") + np.save(cmap_filename, cmap_data) + with satpy.config.set(config_path=[tmp_path]): + rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy") + cmap = create_colormap({'filename': rel_cmap_filename, 'colormap_mode': "RGBA"}) + assert cmap.colors.shape[0] == 4 + assert cmap.colors.shape[1] == 4 # RGBA + np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0]) + assert cmap.values.shape[0] == 4 + assert cmap.values[0] == 0 + assert cmap.values[-1] == 1.0 def test_cmap_from_trollimage(self): """Test that colormaps in trollimage can be loaded.""" @@ -407,7 +391,8 @@ def test_cmap_from_trollimage(self): def test_cmap_no_colormap(self): """Test that being unable to create a colormap raises an error.""" from satpy.enhancements import create_colormap - self.assertRaises(ValueError, create_colormap, {}) + with pytest.raises(ValueError): + create_colormap({}) def test_cmap_list(self): """Test that colors can be a list/tuple.""" @@ -420,15 +405,15 @@ def test_cmap_list(self): ] values = [2, 4, 6, 8] cmap = create_colormap({'colors': colors, 'color_scale': 1}) - self.assertEqual(cmap.colors.shape[0], 4) + assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 0) - self.assertEqual(cmap.values[-1], 1.0) + assert cmap.values.shape[0] == 4 + assert cmap.values[0] == 0 + assert cmap.values[-1] == 1.0 cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values}) - self.assertEqual(cmap.colors.shape[0], 4) + assert cmap.colors.shape[0] == 4 np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0]) - self.assertEqual(cmap.values.shape[0], 4) - self.assertEqual(cmap.values[0], 2) - self.assertEqual(cmap.values[-1], 8) + assert cmap.values.shape[0] == 4 + assert cmap.values[0] == 2 + assert cmap.values[-1] == 8 diff --git a/satpy/tests/enhancement_tests/test_viirs.py b/satpy/tests/enhancement_tests/test_viirs.py index a54292c2e5..5595266034 100644 --- a/satpy/tests/enhancement_tests/test_viirs.py +++ b/satpy/tests/enhancement_tests/test_viirs.py @@ -23,6 +23,8 @@ import numpy as np import xarray as xr +from .test_enhancements import run_and_check_enhancement + class TestVIIRSEnhancement(unittest.TestCase): """Class for testing the VIIRS enhancement function in satpy.enhancements.viirs.""" @@ -70,19 +72,5 @@ def test_viirs(self): from satpy.enhancements.viirs import water_detection expected = [[[1, 7, 8, 8, 8, 9, 10, 11, 14, 8], [20, 23, 26, 10, 12, 15, 18, 21, 24, 27]]] - self._test_enhancement(water_detection, self.da, expected, - palettes=self.palette) - - def _test_enhancement(self, func, data, expected, **kwargs): - from trollimage.xrimage import XRImage - - pre_attrs = data.attrs - img = XRImage(data) - func(img, **kwargs) - - self.assertIsInstance(img.data.data, da.Array) - self.assertListEqual(sorted(pre_attrs.keys()), - sorted(img.data.attrs.keys()), - "DataArray attributes were not preserved") - - np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0) + run_and_check_enhancement(water_detection, 
self.da, expected, + palettes=self.palette) diff --git a/satpy/tests/etc/readers/fake4.yaml b/satpy/tests/etc/readers/fake4.yaml index 4353bd3d27..5ae87fcf73 100644 --- a/satpy/tests/etc/readers/fake4.yaml +++ b/satpy/tests/etc/readers/fake4.yaml @@ -19,11 +19,13 @@ datasets: resolution: 1000 wavelength: [0.1, 0.2, 0.3] file_type: fake_file4 + coordinates: [lons, lats] ds4_b: name: ds4_b - resolution: 1000 + resolution: 250 wavelength: [0.4, 0.5, 0.6] file_type: fake_file4 + coordinates: [lons, lats] file_types: fake_file4: file_reader: !!python/name:satpy.tests.utils.FakeFileHandler diff --git a/satpy/tests/features/feature-load.feature b/satpy/tests/features/feature-load.feature index f6f756da76..62c3af3e43 100644 --- a/satpy/tests/features/feature-load.feature +++ b/satpy/tests/features/feature-load.feature @@ -35,4 +35,4 @@ Feature: Simple and intuitive scene loading (sc. 1) Scenario: Accessing datasets by name prefers less modified datasets Given datasets with the same name When a dataset is retrieved by name - Then the least modified version of the dataset is returned \ No newline at end of file + Then the least modified version of the dataset is returned diff --git a/satpy/tests/features/feature-save.feature b/satpy/tests/features/feature-save.feature index 3e181c1164..2e5421b4f7 100644 --- a/satpy/tests/features/feature-save.feature +++ b/satpy/tests/features/feature-save.feature @@ -21,4 +21,3 @@ Feature: Simple and intuitive saving Given a bunch of datasets are available When the save_datasets command is called Then a bunch of files should be saved on disk - diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py index 3ddaf8ae84..5ab156be8c 100644 --- a/satpy/tests/modifier_tests/test_crefl.py +++ b/satpy/tests/modifier_tests/test_crefl.py @@ -23,6 +23,8 @@ from dask import array as da from pyresample.geometry import AreaDefinition +from ..utils import assert_maximum_dask_computes + @contextmanager def mock_cmgdem(tmpdir, url): @@ -88,8 +90,8 @@ class TestReflectanceCorrectorModifier: @staticmethod def data_area_ref_corrector(): """Create test area definition and data.""" - rows = 5 - cols = 10 + rows = 3 + cols = 5 area = AreaDefinition( 'some_area_name', 'On-the-fly area', 'geosabii', {'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x', @@ -98,12 +100,33 @@ def data_area_ref_corrector(): (-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679)) data = np.zeros((rows, cols)) + 25 - data[3, :] += 25 - data[4:, :] += 50 - data = da.from_array(data, chunks=100) + data[1, :] += 25 + data[2, :] += 50 + data = da.from_array(data, chunks=2) return area, data - def test_reflectance_corrector_abi(self): + @pytest.mark.parametrize( + ("name", "wavelength", "resolution", "exp_mean", "exp_unique"), + [ + ("C01", (0.45, 0.47, 0.49), 1000, 44.757951, + np.array([12.83774603, 14.38767557, 17.24258084, 41.87806142, 44.42472192, 47.89958451, + 48.23343427, 48.53847386, 71.52916035, 72.26078684, 73.10523784])), + ("C02", (0.59, 0.64, 0.69), 500, 51.4901, + np.array([23.69999579, 24.00407203, 24.49390685, 51.4304448, 51.64271324, 51.70519738, + 51.70942859, 51.76064747, 78.37182815, 78.77078522, 78.80199923])), + ("C03", (0.8455, 0.865, 0.8845), 1000, 50.7243, + np.array([24.78444631, 24.86790679, 24.99481254, 50.69670516, 50.72983327, 50.73601728, + 50.75685498, 50.83136276, 76.39973287, 76.5714688, 76.59856607])), + # ("C04", (1.3705, 1.378, 1.3855), 2000, 55.973458829136796, 
None), + ("C05", (1.58, 1.61, 1.64), 1000, 52.7231, + np.array([26.26568157, 26.43230852, 26.48936244, 52.00527783, 52.13043172, 52.20176747, + 53.01505657, 53.29017112, 78.93907987, 79.49089239, 79.69387535])), + ("C06", (2.225, 2.25, 2.275), 2000, 55.9735, + np.array([27.82291562, 28.2268102, 28.37246323, 54.33639308, 54.61451818, 54.77543748, + 56.62284858, 57.27288821, 83.57235975, 84.81324822, 85.27816457])), + ] + ) + def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, exp_unique): """Test ReflectanceCorrector modifier with ABI data.""" from satpy.modifiers._crefl import ReflectanceCorrector from satpy.tests.utils import make_dsq @@ -111,15 +134,15 @@ def test_reflectance_corrector_abi(self): make_dsq(name='satellite_azimuth_angle'), make_dsq(name='satellite_zenith_angle'), make_dsq(name='solar_azimuth_angle'), - make_dsq(name='solar_zenith_angle')], name='C01', prerequisites=[], - wavelength=(0.45, 0.47, 0.49), resolution=1000, calibration='reflectance', + make_dsq(name='solar_zenith_angle')], name=name, prerequisites=[], + wavelength=wavelength, resolution=resolution, calibration='reflectance', modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi') assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') assert ref_cor.attrs['calibration'] == 'reflectance' - assert ref_cor.attrs['wavelength'] == (0.45, 0.47, 0.49) - assert ref_cor.attrs['name'] == 'C01' - assert ref_cor.attrs['resolution'] == 1000 + assert ref_cor.attrs['wavelength'] == wavelength + assert ref_cor.attrs['name'] == name + assert ref_cor.attrs['resolution'] == resolution assert ref_cor.attrs['sensor'] == 'abi' assert ref_cor.attrs['prerequisites'] == [] assert ref_cor.attrs['optional_prerequisites'] == [ @@ -132,48 +155,39 @@ def test_reflectance_corrector_abi(self): c01 = xr.DataArray(dnb, dims=('y', 'x'), attrs={ - 'satellite_longitude': -89.5, 'satellite_latitude': 0.0, - 'satellite_altitude': 35786023.4375, 'platform_name': 'GOES-16', - 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.45, 0.47, 0.49), - 'name': 'C01', 'resolution': 1000, 'sensor': 'abi', + 'platform_name': 'GOES-16', + 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength, + 'name': name, 'resolution': resolution, 'sensor': 'abi', 'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000', - 'area': area, 'ancillary_variables': [] + 'area': area, 'ancillary_variables': [], + 'orbital_parameters': { + 'satellite_nominal_longitude': -89.5, + 'satellite_nominal_latitude': 0.0, + 'satellite_nominal_altitude': 35786023.4375, + }, }) - res = ref_cor([c01], []) + with assert_maximum_dask_computes(0): + res = ref_cor([c01], []) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) - assert res.attrs['satellite_longitude'] == -89.5 - assert res.attrs['satellite_latitude'] == 0.0 - assert res.attrs['satellite_altitude'] == 35786023.4375 assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl') assert res.attrs['platform_name'] == 'GOES-16' assert res.attrs['calibration'] == 'reflectance' assert res.attrs['units'] == '%' - assert res.attrs['wavelength'] == (0.45, 0.47, 0.49) - assert res.attrs['name'] == 'C01' - assert res.attrs['resolution'] == 1000 + assert res.attrs['wavelength'] == wavelength + assert res.attrs['name'] == name + assert res.attrs['resolution'] == resolution assert res.attrs['sensor'] == 'abi' assert res.attrs['start_time'] == '2017-09-20 17:30:40.800000' assert 
res.attrs['end_time'] == '2017-09-20 17:41:17.500000' assert res.attrs['area'] == area assert res.attrs['ancillary_variables'] == [] data = res.values - assert abs(np.nanmean(data) - 26.00760944144745) < 1e-10 - assert data.shape == (5, 10) unique = np.unique(data[~np.isnan(data)]) - np.testing.assert_allclose(unique, [-1.0, 4.210745457958135, 6.7833906076177595, 8.730371329824473, - 10.286627569545209, 11.744159436709374, 12.20226097829902, - 13.501444598985305, 15.344399223932212, 17.173329483996515, - 17.28798660754271, 18.29594550575925, 19.076835059905125, - 19.288331720959864, 19.77043407084455, 19.887082168377006, - 20.091028778326375, 20.230341149334617, 20.457671064690196, - 20.82686905639114, 21.021094816441195, 21.129963777952124, - 41.601857910095575, 43.963919057675504, - 46.21672174361075, 46.972099490462085, 47.497072794632835, - 47.80393007974336, 47.956765988770385, 48.043025685032106, - 51.909142813383916, 58.8234273736508, 68.84706145641482, 69.91085190887961, - 71.10179768327806, 71.33161009169649]) + np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) + assert data.shape == (3, 5) + np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) @pytest.mark.parametrize( 'url,dem_mock_cm,dem_sds', @@ -227,7 +241,7 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle') c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle') - with dem_mock_cm(tmpdir, url): + with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0): res = ref_cor([c01], [c02, c03, c04, c05]) assert isinstance(res, xr.DataArray) @@ -246,10 +260,10 @@ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds): assert res.attrs['area'] == area assert res.attrs['ancillary_variables'] == [] data = res.values - assert abs(np.mean(data) - 40.7578684169142) < 1e-10 - assert data.shape == (5, 10) + assert abs(np.mean(data) - 51.12750267805715) < 1e-6 + assert data.shape == (3, 5) unique = np.unique(data) - np.testing.assert_allclose(unique, [25.20341702519979, 52.38819447051263, 75.79089653845898]) + np.testing.assert_allclose(unique, [25.20341703, 52.38819447, 75.79089654]) def test_reflectance_corrector_modis(self): """Test ReflectanceCorrector modifier with MODIS data.""" @@ -314,11 +328,10 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1 assert res.attrs['area'] == area assert res.attrs['ancillary_variables'] == [] data = res.values - if abs(np.mean(data) - 38.734365117099145) >= 1e-10: - raise AssertionError('{} is not within {} of {}'.format(np.mean(data), 1e-10, 38.734365117099145)) - assert data.shape == (5, 10) + assert abs(np.mean(data) - 52.09372623964498) < 1e-6 + assert data.shape == (3, 5) unique = np.unique(data) - np.testing.assert_allclose(unique, [24.641586, 50.431692, 69.315375]) + np.testing.assert_allclose(unique, [25.43670075, 52.93221561, 77.91226236]) def test_reflectance_corrector_bad_prereqs(self): """Test ReflectanceCorrector modifier with wrong number of inputs.""" diff --git a/satpy/tests/reader_tests/_modis_fixtures.py b/satpy/tests/reader_tests/_modis_fixtures.py index fa2819ee88..ab09635d45 100644 --- a/satpy/tests/reader_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/_modis_fixtures.py @@ -34,7 +34,7 @@ AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)] 
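+# Shapes at finer resolutions are derived by repeating 5 km scan lines (see + # RES_TO_REPEAT_FACTOR below), so shrinking SCAN_LEN_5KM shrinks every fixture consistently.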
AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2'] -SCAN_LEN_5KM = 406 +SCAN_LEN_5KM = 6 # 3 scans of 5km data SCAN_WIDTH_5KM = 270 SCALE_FACTOR = 1 RES_TO_REPEAT_FACTOR = { @@ -75,9 +75,24 @@ def _generate_angle_data(resolution: int) -> np.ndarray: def _generate_visible_data(resolution: int, num_bands: int, dtype=np.uint16) -> np.ndarray: shape = _shape_for_resolution(resolution) data = np.zeros((num_bands, shape[0], shape[1]), dtype=dtype) + + # add fill value to every band + data[:, -1, -1] = 65535 + + # add band 2 saturation and 'can't aggregate' fill values + data[1, -1, -2] = 65533 + data[1, -1, -3] = 65528 return data +def _generate_visible_uncertainty_data(shape: tuple) -> np.ndarray: + uncertainty = np.zeros(shape, dtype=np.uint8) + uncertainty[:, -1, -1] = 15 # fill value + uncertainty[:, -1, -2] = 15 # saturated + uncertainty[:, -1, -3] = 15 # can't aggregate + return uncertainty + + def _get_lonlat_variable_info(resolution: int) -> dict: lon_5km, lat_5km = _generate_lonlat_data(resolution) return { @@ -115,6 +130,7 @@ def _get_angles_variable_info(resolution: int) -> dict: def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]): num_bands = len(bands) data = _generate_visible_data(resolution, len(bands)) + uncertainty = _generate_visible_uncertainty_data(data.shape) dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2 band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B" row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B' @@ -136,7 +152,7 @@ def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]) }, }, var_name + '_Uncert_Indexes': { - 'data': np.zeros(data.shape, dtype=np.uint8), + 'data': uncertainty, 'type': SDC.UINT8, 'fill_value': 255, 'attrs': { diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py index ecca80e323..da0078ac83 100644 --- a/satpy/tests/reader_tests/test_aapp_l1b.py +++ b/satpy/tests/reader_tests/test_aapp_l1b.py @@ -22,6 +22,7 @@ import tempfile import unittest from contextlib import suppress +from unittest import mock import numpy as np @@ -153,6 +154,131 @@ def test_navigation(self): res = fh.get_dataset(key, info) assert(np.all(res == 0)) + def test_interpolation(self): + """Test the interpolation of longitudes and latitudes.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(22016, 0) + self._data.tofile(tmpfile) + fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) + lons40km = np.array([ + [-115.9773, -122.3054, -127.7482, -132.464, -136.5788, -140.1951, + -143.3961, -146.2497, -148.8112, -151.1259, -153.2309, -155.1568, + -156.9291, -158.5689, -160.0941, -161.5196, -162.8584, -164.1212, + -165.3176, -166.4557, -167.5426, -168.5846, -169.5872, -170.5555, + -171.4937, -172.406, -173.296, -174.1671, -175.0224, -175.865, + -176.6976, -177.523, -178.3439, -179.1628, -179.9825, 179.1944, + 178.3651, 177.5267, 176.6761, 175.8098, 174.9242, 174.0149, + 173.0773, 172.1057, 171.0935, 170.0326, 168.9128, 167.7211, + 166.4397, 165.0436, 163.4946], + [-115.9639, -122.2967, -127.7441, -132.4639, -136.5824, -140.2018, + -143.4055, -146.2614, -148.8249, -151.1413, -153.2478, -155.175, + -156.9484, -158.5892, -160.1152, -161.5415, -162.8809, -164.1443, + -165.3412, -166.4797, -167.567, -168.6094, -169.6123, -170.5808, + -171.5192, -172.4317, -173.3219, -174.1931, -175.0486, -175.8913, + -176.724, -177.5494, -178.3703, -179.1893, 179.991, 179.168, + 178.3388, 177.5005, 176.6499, 175.7838, 174.8983, 173.9892, + 173.0518,
172.0805, 171.0685, 170.0079, 168.8885, 167.6972, + 166.4164, 165.0209, 163.4726], + [-115.9504, -122.288, -127.7399, -132.4639, -136.5859, -140.2084, + -143.4148, -146.2731, -148.8386, -151.1567, -153.2647, -155.1932, + -156.9677, -158.6095, -160.1363, -161.5634, -162.9034, -164.1674, + -165.3648, -166.5038, -167.5915, -168.6341, -169.6374, -170.6061, + -171.5448, -172.4575, -173.3478, -174.2192, -175.0748, -175.9176, + -176.7503, -177.5758, -178.3968, -179.2157, 179.9646, 179.1416, + 178.3124, 177.4742, 176.6238, 175.7577, 174.8724, 173.9635, + 173.0263, 172.0552, 171.0436, 169.9833, 168.8643, 167.6734, + 166.3931, 164.9982, 163.4507]]) + lats40km = np.array([ + [78.6613, 78.9471, 79.0802, 79.1163, 79.0889, 79.019, 78.9202, + 78.8016, 78.6695, 78.528, 78.38, 78.2276, 78.0721, 77.9145, + 77.7553, 77.5949, 77.4335, 77.2712, 77.1079, 76.9435, 76.7779, + 76.6108, 76.4419, 76.2708, 76.0973, 75.921, 75.7412, 75.5576, + 75.3696, 75.1764, 74.9776, 74.7721, 74.5592, 74.3379, 74.1069, + 73.865, 73.6106, 73.342, 73.057, 72.7531, 72.4273, 72.076, + 71.6945, 71.2773, 70.8171, 70.3046, 69.7272, 69.0676, 68.3014, + 67.3914, 66.2778], + [78.6703, 78.9565, 79.0897, 79.1259, 79.0985, 79.0286, 78.9297, + 78.8111, 78.6789, 78.5373, 78.3892, 78.2367, 78.0811, 77.9233, + 77.764, 77.6035, 77.442, 77.2796, 77.1162, 76.9518, 76.7861, + 76.6188, 76.4498, 76.2787, 76.1051, 75.9287, 75.7488, 75.5651, + 75.377, 75.1838, 74.9848, 74.7793, 74.5663, 74.3448, 74.1138, + 73.8718, 73.6173, 73.3486, 73.0635, 72.7595, 72.4336, 72.0821, + 71.7005, 71.2832, 70.8229, 70.3102, 69.7326, 69.0729, 68.3065, + 67.3963, 66.2825], + [78.6794, 78.9658, 79.0993, 79.1355, 79.1082, 79.0381, 78.9392, + 78.8205, 78.6882, 78.5465, 78.3984, 78.2458, 78.0901, 77.9322, + 77.7728, 77.6122, 77.4506, 77.2881, 77.1246, 76.96, 76.7942, + 76.6269, 76.4578, 76.2866, 76.1129, 75.9364, 75.7564, 75.5727, + 75.3844, 75.1911, 74.9921, 74.7864, 74.5734, 74.3518, 74.1207, + 73.8786, 73.624, 73.3552, 73.0699, 72.7658, 72.4398, 72.0882, + 71.7065, 71.2891, 70.8286, 70.3158, 69.7381, 69.0782, 68.3116, + 67.4012, 66.2872]]) + fh._get_coordinates_in_degrees = mock.MagicMock() + fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km) + (lons, lats) = fh._get_all_interpolated_coordinates() + lon_data = lons.compute() + self.assertTrue(np.max(lon_data) <= 180) + # No longitudes between -110 and 110 in the input data + self.assertTrue(np.all(np.abs(lon_data) > 110)) + + def test_interpolation_angles(self): + """Test the interpolation of angles.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(22016, 0) + self._data.tofile(tmpfile) + fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info) + + sunz40km = np.array( + [[122.42, 121.72, 121.14, 120.63, 120.19, 119.79, 119.43, 119.1, 118.79, 118.51, + 118.24, 117.99, 117.76, 117.53, 117.31, 117.1, 116.9, 116.71, 116.52, 116.33, + 116.15, 115.97, 115.79, 115.61, 115.44, 115.26, 115.08, 114.91, 114.73, 114.55, + 114.36, 114.18, 113.98, 113.79, 113.58, 113.37, 113.15, 112.92, 112.68, 112.43, + 112.15, 111.87, 111.55, 111.22, 110.85, 110.44, 109.99, 109.47, 108.88, 108.18, + 107.33], + [122.41, 121.71, 121.13, 120.62, 120.18, 119.78, 119.42, 119.09, 118.78, 118.5, + 118.24, 117.99, 117.75, 117.52, 117.31, 117.1, 116.9, 116.7, 116.51, 116.32, + 116.14, 115.96, 115.78, 115.6, 115.43, 115.25, 115.08, 114.9, 114.72, 114.54, + 114.36, 114.17, 113.98, 113.78, 113.57, 113.36, 113.14, 112.91, 112.67, 112.42, + 112.15, 111.86, 111.55, 111.21, 110.84, 110.43, 109.98, 109.46,
108.87, 108.17, + 107.32]]) + satz40km = np.array( + [[6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, + 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, + 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01, + 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00, + 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, + 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, + 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, + 6.290e+01, 6.633e+01], + [6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01, + 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01, + 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01, + 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00, + 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01, + 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01, + 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01, + 6.290e+01, 6.633e+01]]) + azidiff40km = np.array([ + [56.9, 56.24, 55.71, 55.27, 54.9, 54.57, 54.29, 54.03, 53.8, 53.59, + 53.4, 53.22, 53.05, 52.89, 52.74, 52.6, 52.47, 52.34, 52.22, 52.1, + 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.76, 128.86, 128.96, + 129.07, 129.17, 129.27, 129.38, 129.49, 129.6, 129.72, 129.83, 129.95, 130.08, + 130.21, 130.35, 130.5, 130.65, 130.81, 130.99, 131.18, 131.39, 131.63, 131.89, + 132.19], + [56.9, 56.24, 55.72, 55.28, 54.9, 54.58, 54.29, 54.03, 53.8, 53.59, + 53.4, 53.22, 53.05, 52.89, 52.75, 52.6, 52.47, 52.34, 52.22, 52.1, + 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.75, 128.86, 128.96, + 129.06, 129.17, 129.27, 129.38, 129.49, 129.6, 129.71, 129.83, 129.95, 130.08, + 130.21, 130.35, 130.49, 130.65, 130.81, 130.99, 131.18, 131.39, 131.62, 131.89, + 132.19]]) + fh._get_tiepoint_angles_in_degrees = mock.MagicMock() + fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km) + (sunz, satz, azidiff) = fh._get_all_interpolated_angles() + self.assertTrue(np.max(sunz) <= 123) + self.assertTrue(np.max(satz) <= 70) + class TestAAPPL1BChannel3AMissing(unittest.TestCase): """Test the filehandler when channel 3a is missing.""" diff --git a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py new file mode 100644 index 0000000000..c6ffe26b1b --- /dev/null +++ b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py @@ -0,0 +1,500 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2021 Pytroll + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see <http://www.gnu.org/licenses/>.
+ +"""Test module for the MHS AAPP level-1c reader.""" + + +import datetime +import tempfile +import unittest + +import numpy as np + +from satpy.readers.aapp_mhs_amsub_l1c import _HEADERTYPE, _SCANTYPE, HEADER_LENGTH, MHS_AMSUB_AAPPL1CFile +from satpy.tests.utils import make_dataid + +SCANLINE1 = [[26798, 27487, 23584, 24816, 26196], + [26188, 27392, 23453, 24832, 26223], + [23777, 26804, 23529, 24876, 26193], + [23311, 26781, 23583, 24898, 26157], + [23194, 26737, 23743, 24917, 26199], + [23811, 26871, 23836, 25017, 26239], + [25000, 27034, 23952, 25118, 26247], + [25937, 26988, 24019, 25058, 26143], + [25986, 26689, 24048, 25081, 25967], + [24689, 26083, 24062, 24975, 25744], + [23719, 25519, 24016, 24938, 25617], + [23327, 25565, 23882, 24960, 25571], + [23214, 25646, 23862, 24847, 25561], + [23473, 25886, 23859, 24832, 25640], + [23263, 25449, 23759, 24730, 25525], + [23335, 25672, 23716, 24727, 25578], + [23477, 25983, 23771, 24847, 25882], + [23141, 25863, 23758, 24971, 26066], + [23037, 25813, 23855, 25113, 26231], + [22908, 25701, 23958, 25130, 26226], + [22608, 25493, 23980, 25223, 26277], + [22262, 25275, 24019, 25356, 26247], + [21920, 25116, 24161, 25375, 26268], + [21559, 24795, 24169, 25351, 26284], + [21269, 24591, 24333, 25503, 26300], + [21028, 24395, 24413, 25498, 26300], + [20887, 24254, 24425, 25479, 26228], + [20882, 24288, 24440, 25463, 26284], + [20854, 24261, 24569, 25438, 26266], + [20911, 24277, 24564, 25464, 26213], + [21069, 24369, 24567, 25473, 26211], + [20994, 24201, 24747, 25552, 26130], + [21909, 24648, 24856, 25546, 26249], + [21936, 24662, 24843, 25612, 26207], + [21142, 24248, 24885, 25616, 26159], + [21180, 24251, 24817, 25553, 26114], + [21236, 24219, 24840, 25569, 26100], + [21057, 24152, 24735, 25535, 26093], + [20825, 24018, 24830, 25528, 26103], + [20731, 23866, 24789, 25579, 26117], + [20924, 23972, 24808, 25512, 26082], + [21228, 24259, 24723, 25501, 26071], + [21307, 24285, 24733, 25491, 26058], + [21558, 24521, 24739, 25511, 26009], + [21562, 24500, 24706, 25538, 26091], + [21568, 24448, 24639, 25504, 26011], + [21636, 24520, 24673, 25462, 26028], + [21895, 24667, 24662, 25494, 26048], + [22251, 24892, 24570, 25435, 25977], + [22459, 25109, 24557, 25340, 26010], + [22426, 25030, 24533, 25310, 25964], + [22419, 24966, 24528, 25316, 25953], + [22272, 24851, 24503, 25318, 25891], + [22261, 24799, 24548, 25326, 25912], + [22445, 25023, 24410, 25333, 25930], + [22371, 24902, 24381, 25323, 25892], + [21791, 24521, 24407, 25362, 25880], + [20930, 23820, 24440, 25287, 25849], + [21091, 24008, 24412, 25251, 25854], + [21575, 24331, 24405, 25272, 25774], + [21762, 24545, 24395, 25216, 25763], + [21891, 24550, 24317, 25256, 25790], + [21865, 24584, 24250, 25205, 25797], + [21431, 24178, 24302, 25228, 25738], + [21285, 23978, 24240, 25205, 25735], + [21935, 24515, 24232, 25240, 25834], + [22372, 24790, 24325, 25311, 25878], + [22621, 24953, 24410, 25395, 25897], + [23642, 25290, 24456, 25428, 25959], + [23871, 25209, 24376, 25369, 25976], + [22846, 24495, 24378, 25347, 25868], + [22490, 24320, 24327, 25374, 25849], + [23237, 24599, 24182, 25298, 25839], + [23134, 24601, 24121, 25306, 25864], + [22647, 24314, 24108, 25248, 25787], + [22499, 24293, 24049, 25165, 25823], + [22247, 23987, 23936, 25131, 25742], + [22291, 23942, 23908, 25028, 25715], + [22445, 24205, 23784, 24997, 25615], + [22487, 24417, 23764, 24921, 25643], + [22386, 24420, 23765, 24865, 25715], + [22217, 24326, 23748, 24823, 25617], + [21443, 23814, 23722, 24750, 25552], + [20354, 22599, 
23580, 24722, 25439], + [20331, 22421, 23431, 24655, 25389], + [19925, 21855, 23412, 24623, 25284], + [20240, 22224, 23339, 24545, 25329], + [20368, 22596, 23419, 24474, 25362], + [20954, 23192, 23345, 24416, 25403], + [22292, 24303, 23306, 24330, 25353]] + +ANGLES_SCLINE1 = [[5926, 35786, 7682, 23367], + [5769, 35780, 7709, 23352], + [5614, 35774, 7733, 23339], + [5463, 35769, 7756, 23326], + [5314, 35763, 7777, 23313], + [5167, 35758, 7797, 23302], + [5022, 35753, 7816, 23290], + [4879, 35747, 7834, 23280], + [4738, 35742, 7851, 23269], + [4598, 35737, 7868, 23259], + [4459, 35732, 7883, 23249], + [4321, 35727, 7899, 23240], + [4185, 35721, 7913, 23231], + [4049, 35716, 7927, 23222], + [3914, 35711, 7940, 23213], + [3780, 35706, 7953, 23204], + [3647, 35701, 7966, 23195], + [3515, 35695, 7978, 23187], + [3383, 35690, 7990, 23179], + [3252, 35685, 8001, 23170], + [3121, 35680, 8013, 23162], + [2991, 35674, 8023, 23154], + [2861, 35669, 8034, 23146], + [2732, 35663, 8045, 23138], + [2603, 35658, 8055, 23130], + [2474, 35652, 8065, 23122], + [2346, 35647, 8075, 23114], + [2218, 35641, 8084, 23106], + [2090, 35635, 8094, 23098], + [1963, 35630, 8103, 23090], + [1836, 35624, 8112, 23082], + [1709, 35618, 8121, 23074], + [1582, 35612, 8130, 23066], + [1455, 35605, 8139, 23057], + [1329, 35599, 8148, 23049], + [1203, 35593, 8157, 23041], + [1077, 35586, 8165, 23032], + [951, 35580, 8174, 23023], + [825, 35573, 8182, 23014], + [699, 35566, 8191, 23005], + [573, 35560, 8199, 22996], + [448, 35553, 8208, 22987], + [322, 35548, 8216, 22977], + [196, 35545, 8224, 22968], + [71, 35561, 8233, 22958], + [54, 17463, 8241, 22947], + [179, 17489, 8249, 22937], + [305, 17486, 8258, 22926], + [431, 17479, 8266, 22915], + [556, 17471, 8275, 22903], + [682, 17461, 8283, 22891], + [808, 17451, 8291, 22879], + [934, 17440, 8300, 22866], + [1060, 17428, 8309, 22853], + [1186, 17416, 8317, 22839], + [1312, 17403, 8326, 22824], + [1438, 17390, 8335, 22809], + [1565, 17375, 8344, 22793], + [1692, 17360, 8353, 22776], + [1818, 17344, 8362, 22759], + [1946, 17327, 8371, 22740], + [2073, 17309, 8381, 22720], + [2201, 17289, 8390, 22699], + [2329, 17268, 8400, 22676], + [2457, 17245, 8410, 22652], + [2585, 17220, 8420, 22625], + [2714, 17194, 8431, 22597], + [2843, 17164, 8441, 22566], + [2973, 17132, 8452, 22533], + [3103, 17097, 8463, 22496], + [3234, 17058, 8475, 22455], + [3365, 17014, 8486, 22410], + [3497, 16965, 8498, 22359], + [3629, 16909, 8511, 22301], + [3762, 16844, 8524, 22236], + [3896, 16770, 8537, 22160], + [4031, 16683, 8551, 22071], + [4166, 16578, 8565, 21965], + [4303, 16452, 8580, 21837], + [4440, 16295, 8595, 21679], + [4579, 16096, 8611, 21478], + [4718, 15835, 8628, 21215], + [4860, 15477, 8646, 20856], + [5003, 14963, 8665, 20341], + [5147, 14178, 8684, 19553], + [5294, 12897, 8705, 18270], + [5442, 10778, 8727, 16150], + [5593, 7879, 8751, 13250], + [5747, 5305, 8776, 10674], + [5904, 3659, 8803, 9027]] + +LATLON_SCLINE1 = [[715994, 787602], + [720651, 786999], + [724976, 786407], + [729013, 785827], + [732799, 785255], + [736362, 784692], + [739728, 784134], + [742919, 783583], + [745953, 783035], + [748844, 782492], + [751607, 781951], + [754254, 781412], + [756796, 780875], + [759240, 780338], + [761597, 779801], + [763872, 779264], + [766073, 778726], + [768206, 778186], + [770275, 777644], + [772287, 777100], + [774245, 776552], + [776153, 776000], + [778015, 775444], + [779836, 774882], + [781617, 774316], + [783361, 773743], + [785073, 773163], + [786753, 772576], + [788405, 771981], + 
[790031, 771377], + [791633, 770764], + [793212, 770140], + [794771, 769506], + [796312, 768860], + [797837, 768201], + [799346, 767528], + [800842, 766841], + [802326, 766138], + [803799, 765419], + [805264, 764681], + [806721, 763924], + [808171, 763147], + [809617, 762347], + [811060, 761523], + [812500, 760673], + [813939, 759796], + [815378, 758888], + [816819, 757949], + [818263, 756974], + [819712, 755962], + [821166, 754909], + [822627, 753812], + [824096, 752666], + [825575, 751468], + [827065, 750213], + [828567, 748894], + [830084, 747507], + [831617, 746043], + [833167, 744496], + [834736, 742855], + [836327, 741112], + [837940, 739253], + [839578, 737265], + [841243, 735132], + [842938, 732835], + [844665, 730352], + [846425, 727656], + [848223, 724716], + [850060, 721492], + [851941, 717939], + [853868, 713998], + [855845, 709597], + [857875, 704644], + [859963, 699024], + [862113, 692583], + [864329, 685119], + [866616, 676358], + [868979, 665918], + [871421, 653256], + [873947, 637570], + [876557, 617626], + [879250, 591448], + [882013, 555681], + [884815, 504285], + [887577, 425703], + [890102, 297538], + [891907, 85636], + [892134, -204309], + [890331, -461741], + [887022, -626300]] + + +class TestMHS_AMSUB_AAPPL1CReadData(unittest.TestCase): + """Test the filehandler.""" + + def setUp(self): + """Set up the test case.""" + self._header = np.zeros(1, dtype=_HEADERTYPE) + self._header['satid'][0] = 3 + self._header['instrument'][0] = 12 + self._header['tempradcnv'][0] = [[2968720, 0, 1000000, 5236956, 0], + [1000000, 6114597, 0, 1000000, 6114597], + [-3100, 1000270, 6348092, 0, 1000000]] + self._data = np.zeros(3, dtype=_SCANTYPE) + self._data['scnlinyr'][:] = 2020 + self._data['scnlindy'][:] = 261 + self._data['scnlintime'][0] = 36368496 + self._data['scnlintime'][1] = 36371163 + self._data['scnlintime'][2] = 36373830 + self._data['qualind'][0] = 0 + self._data['qualind'][1] = 0 + self._data['qualind'][2] = 0 + self._data['scnlinqual'][0] = 16384 + self._data['scnlinqual'][1] = 16384 + self._data['scnlinqual'][2] = 16384 + self._data['chanqual'][0] = [6, 6, 6, 6, 6] + self._data['chanqual'][1] = [6, 6, 6, 6, 6] + self._data['chanqual'][2] = [6, 6, 6, 6, 6] + self._data['instrtemp'][:] = [29520, 29520, 29520] + self._data['dataqual'][:] = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0] + self._data['scalti'][0:3] = [8321, 8321, 8321] + self._data['latlon'][0] = LATLON_SCLINE1 + self._data['angles'][0] = ANGLES_SCLINE1 + self._data['btemps'][0] = SCANLINE1 + self.filename_info = {'platform_shortname': 'metop01', + 'start_time': datetime.datetime(2020, 9, 17, 10, 6), + 'orbit_number': 41509} + + self.filetype_info = {'file_reader': MHS_AMSUB_AAPPL1CFile, + 'file_patterns': + ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'], + 'file_type': 'mhs_aapp_l1c'} + + def test_platform_name(self): + """Test getting the platform name.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + + assert fh_.platform_name == 'Metop-C' + + self._header['satid'][0] = 1 + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 
0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + + assert fh_.platform_name == 'Metop-B' + + def test_sensor_name(self): + """Test getting the sensor name.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + + assert fh_.sensor == 'mhs' + + self._header['instrument'][0] = 11 + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + + assert fh_.sensor == 'amsub' + + self._header['instrument'][0] = 10 + + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + with self.assertRaises(IOError): + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + + def test_read(self): + """Test reading the brightness temperatures.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + + info = {} + + chmin = [199.25, 218.55, 233.06, 243.3, 252.84] + chmax = [267.98, 274.87, 248.85, 256.16, 263.] + for chn, name in enumerate(['1', '2', '3', '4', '5']): + key = make_dataid(name=name, calibration='brightness_temperature') + res = fh_.get_dataset(key, info) + + assert(res.min() == chmin[chn]) + assert(res.max() == chmax[chn]) + + def test_angles(self): + """Test reading the angles.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + info = {} + key = make_dataid(name='solar_zenith_angle') + res = fh_.get_dataset(key, info) + + assert(np.all(res[2] == 0)) + assert(np.all(res[1] == 0)) + expected = np.array([76.82, 77.09, 77.33, 77.56, 77.77, 77.97, 78.16, 78.34, 78.51, + 78.68, 78.83, 78.99, 79.13, 79.27, 79.4, 79.53, 79.66, 79.78, + 79.9, 80.01, 80.13, 80.23, 80.34, 80.45, 80.55, 80.65, 80.75, + 80.84, 80.94, 81.03, 81.12, 81.21, 81.3, 81.39, 81.48, 81.57, + 81.65, 81.74, 81.82, 81.91, 81.99, 82.08, 82.16, 82.24, 82.33, + 82.41, 82.49, 82.58, 82.66, 82.75, 82.83, 82.91, 83., 83.09, + 83.17, 83.26, 83.35, 83.44, 83.53, 83.62, 83.71, 83.81, 83.9, + 84., 84.1, 84.2, 84.31, 84.41, 84.52, 84.63, 84.75, 84.86, + 84.98, 85.11, 85.24, 85.37, 85.51, 85.65, 85.8, 85.95, 86.11, + 86.28, 86.46, 86.65, 86.84, 87.05, 87.27, 87.51, 87.76, 88.03]) + + np.testing.assert_allclose(res[0], expected) + + def test_navigation(self): + """Test reading the longitudes and latitudes.""" + with tempfile.TemporaryFile() as tmpfile: + self._header.tofile(tmpfile) + tmpfile.seek(HEADER_LENGTH, 0) + self._data.tofile(tmpfile) + + fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info) + info = {} + key = make_dataid(name='longitude') + res = fh_.get_dataset(key, info) + + assert(np.all(res[2] == 0)) + assert(np.all(res[1] == 0)) + expected = np.array([78.7602, 78.6999, 78.6407, 78.5827, 78.5255, 78.4692, + 78.4134, 78.3583, 78.3035, 78.2492, 78.1951, 78.1412, + 78.0875, 78.0338, 77.9801, 77.9264, 77.8726, 77.8186, + 77.7644, 77.71, 77.6552, 77.6, 77.5444, 77.4882, + 77.4316, 77.3743, 77.3163,
77.2576, 77.1981, 77.1377, + 77.0764, 77.014, 76.9506, 76.886, 76.8201, 76.7528, + 76.6841, 76.6138, 76.5419, 76.4681, 76.3924, 76.3147, + 76.2347, 76.1523, 76.0673, 75.9796, 75.8888, 75.7949, + 75.6974, 75.5962, 75.4909, 75.3812, 75.2666, 75.1468, + 75.0213, 74.8894, 74.7507, 74.6043, 74.4496, 74.2855, + 74.1112, 73.9253, 73.7265, 73.5132, 73.2835, 73.0352, + 72.7656, 72.4716, 72.1492, 71.7939, 71.3998, 70.9597, + 70.4644, 69.9024, 69.2583, 68.5119, 67.6358, 66.5918, + 65.3256, 63.757, 61.7626, 59.1448, 55.5681, 50.4285, + 42.5703, 29.7538, 8.5636, -20.4309, -46.1741, -62.63]) + + np.testing.assert_allclose(res[0], expected) + + key = make_dataid(name='latitude') + res = fh_.get_dataset(key, info) + + assert(np.all(res[2] == 0)) + assert(np.all(res[1] == 0)) + expected = np.array([71.5994, 72.0651, 72.4976, 72.9013, 73.2799, 73.6362, 73.9728, + 74.2919, 74.5953, 74.8844, 75.1607, 75.4254, 75.6796, 75.924, + 76.1597, 76.3872, 76.6073, 76.8206, 77.0275, 77.2287, 77.4245, + 77.6153, 77.8015, 77.9836, 78.1617, 78.3361, 78.5073, 78.6753, + 78.8405, 79.0031, 79.1633, 79.3212, 79.4771, 79.6312, 79.7837, + 79.9346, 80.0842, 80.2326, 80.3799, 80.5264, 80.6721, 80.8171, + 80.9617, 81.106, 81.25, 81.3939, 81.5378, 81.6819, 81.8263, + 81.9712, 82.1166, 82.2627, 82.4096, 82.5575, 82.7065, 82.8567, + 83.0084, 83.1617, 83.3167, 83.4736, 83.6327, 83.794, 83.9578, + 84.1243, 84.2938, 84.4665, 84.6425, 84.8223, 85.006, 85.1941, + 85.3868, 85.5845, 85.7875, 85.9963, 86.2113, 86.4329, 86.6616, + 86.8979, 87.1421, 87.3947, 87.6557, 87.925, 88.2013, 88.4815, + 88.7577, 89.0102, 89.1907, 89.2134, 89.0331, 88.7022]) + + np.testing.assert_allclose(res[0], expected) diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py index 19c1ac1dad..02aaad977b 100644 --- a/satpy/tests/reader_tests/test_abi_l2_nc.py +++ b/satpy/tests/reader_tests/test_abi_l2_nc.py @@ -74,6 +74,18 @@ def _create_cmip_dataset(): return fake_dataset +def _compare_subdict(actual_dict, exp_sub_dict): + for key, value in exp_sub_dict.items(): + assert key in actual_dict + assert actual_dict[key] == value + + +def _assert_orbital_parameters(orb_params): + assert orb_params['satellite_nominal_longitude'] == -89.5 + assert orb_params['satellite_nominal_latitude'] == 0.0 + assert orb_params['satellite_nominal_altitude'] == 35786020.0 + + def _create_mcmip_dataset(): fake_dataset = _create_cmip_dataset() fake_dataset = fake_dataset.copy(deep=True) @@ -91,10 +103,18 @@ def setUp(self): fake_cmip_dataset = _create_cmip_dataset() with mock.patch('satpy.readers.abi_base.xr') as xr_: xr_.open_dataset.return_value = fake_cmip_dataset - self.reader = NC_ABI_L2('filename', - {'platform_shortname': 'G16', - 'scan_mode': 'M3'}, - {'file_type': 'info'}) + self.reader = NC_ABI_L2( + 'filename', + { + 'platform_shortname': 'G16', + 'scan_mode': 'M3', + 'scene_abbr': 'M1', + }, + { + 'file_type': 'info', + 'observation_type': 'ACHA', + }, + ) class Test_NC_ABI_L2_get_dataset(Test_NC_ABI_L2_base): @@ -112,21 +132,21 @@ def test_get_dataset(self): exp_attrs = {'instrument_ID': None, 'modifiers': (), 'name': 'HT', + 'observation_type': 'ACHA', 'orbital_slot': None, 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, - 'satellite_altitude': 35786020., - 'satellite_latitude': 0.0, - 'satellite_longitude': -89.5, 'scan_mode': 'M3', + 'scene_abbr': 'M1', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, 'units': 'm'} self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True)) - 
self.assertDictEqual(dict(res.attrs), exp_attrs) + _compare_subdict(res.attrs, exp_attrs) + _assert_orbital_parameters(res.attrs['orbital_parameters']) class TestMCMIPReading: @@ -153,15 +173,14 @@ def test_mcmip_get_dataset(self, xr_): exp_attrs = {'instrument_ID': None, 'modifiers': (), 'name': 'C14', + 'observation_type': 'MCMIP', 'orbital_slot': None, 'reader': 'abi_l2_nc', 'platform_name': 'GOES-16', 'platform_shortname': 'G16', 'production_site': None, - 'satellite_altitude': 35786020., - 'satellite_latitude': 0.0, - 'satellite_longitude': -89.5, 'scan_mode': 'M6', + 'scene_abbr': 'F', 'scene_id': None, 'sensor': 'abi', 'timeline_ID': None, @@ -175,10 +194,8 @@ def test_mcmip_get_dataset(self, xr_): res = scn['C14'] np.testing.assert_allclose(res.data, exp_data, equal_nan=True) assert isinstance(res.attrs['area'], AreaDefinition) - # don't complicate the comparison below - for key in ('area', '_satpy_id'): - del res.attrs[key] - assert dict(res.attrs) == exp_attrs + _compare_subdict(res.attrs, exp_attrs) + _assert_orbital_parameters(res.attrs['orbital_parameters']) class Test_NC_ABI_L2_area_fixedgrid(Test_NC_ABI_L2_base): diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 7aeb71ec96..1681906785 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -29,13 +29,18 @@ class TestHRITJMAFileHandler(unittest.TestCase): """Test the HRITJMAFileHandler.""" @mock.patch('satpy.readers.hrit_jma.HRITFileHandler.__init__') - def _get_reader(self, mocked_init, mda, filename_info=None): + def _get_reader(self, mocked_init, mda, filename_info=None, filetype_info=None, reader_kwargs=None): from satpy.readers.hrit_jma import HRITJMAFileHandler if not filename_info: filename_info = {} + if not filetype_info: + filetype_info = {} + if not reader_kwargs: + reader_kwargs = {} HRITJMAFileHandler.filename = 'filename' HRITJMAFileHandler.mda = mda - return HRITJMAFileHandler('filename', filename_info, {}) + HRITJMAFileHandler._start_time = filename_info.get('start_time') + return HRITJMAFileHandler('filename', filename_info, filetype_info, **reader_kwargs) def _get_acq_time(self, nlines): """Get sample header entry for scanline acquisition times. @@ -279,9 +284,6 @@ def test_get_dataset(self, base_get_dataset): self.assertEqual(res.attrs['units'], '%') self.assertEqual(res.attrs['sensor'], 'ahi') self.assertEqual(res.attrs['platform_name'], HIMAWARI8) - self.assertEqual(res.attrs['satellite_longitude'], 140.7) - self.assertEqual(res.attrs['satellite_latitude'], 0.) 
- self.assertEqual(res.attrs['satellite_altitude'], 35785831.0) self.assertDictEqual(res.attrs['orbital_parameters'], {'projection_longitude': 140.7, 'projection_latitude': 0., 'projection_altitude': 35785831.0}) @@ -321,3 +323,26 @@ def test_get_acq_time(self): np.testing.assert_allclose(reader.acq_time.astype(np.int64), acq_time_exp.astype(np.int64), atol=45000) + + def test_start_time_from_filename(self): + """Test that by default the datetime in the filename is returned.""" + import datetime as dt + start_time = dt.datetime(2022, 1, 20, 12, 10) + for platform in ['Himawari-8', 'MTSAT-2']: + mda = self._get_mda(platform=platform) + reader = self._get_reader( + mda=mda, + filename_info={'start_time': start_time}) + assert reader._start_time == start_time + + def test_start_time_from_acq_time(self): + """Test that the datetime from the metadata is returned when `use_acquisition_time_as_start_time=True`.""" + import datetime as dt + start_time = dt.datetime(2022, 1, 20, 12, 10) + for platform in ['Himawari-8', 'MTSAT-2']: + mda = self._get_mda(platform=platform) + reader = self._get_reader( + mda=mda, + filename_info={'start_time': start_time}, + reader_kwargs={'use_acquisition_time_as_start_time': True}) + assert reader.start_time == reader.acq_time[0].astype(dt.datetime) diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py index 15775273d8..1112ebe5a5 100644 --- a/satpy/tests/reader_tests/test_ahi_hsd.py +++ b/satpy/tests/reader_tests/test_ahi_hsd.py @@ -16,17 +16,86 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see <http://www.gnu.org/licenses/>. """The ahi_hsd reader tests package.""" +from __future__ import annotations +import contextlib import unittest import warnings from datetime import datetime +from typing import Any, Dict from unittest import mock import dask.array as da import numpy as np +import pytest from satpy.readers.ahi_hsd import AHIHSDFileHandler from satpy.readers.utils import get_geostationary_mask +from satpy.tests.utils import make_dataid + +InfoDict = Dict[str, Any] + +FAKE_BASIC_INFO: InfoDict = { + 'blocklength': 0, + 'satellite': 'Himawari-8', + 'observation_area': 'FLDK', + 'observation_start_time': 58413.12523839, + 'observation_end_time': 58413.12562439, + 'observation_timeline': '0300', +} +FAKE_DATA_INFO: InfoDict = { + 'blocklength': 50, + 'compression_flag_for_data': 0, + 'hblock_number': 2, + 'number_of_bits_per_pixel': 16, + 'number_of_columns': 11000, + 'number_of_lines': 1100, + 'spare': '', +} +FAKE_PROJ_INFO: InfoDict = { + 'CFAC': 40932549, + 'COFF': 5500.5, + 'LFAC': 40932549, + 'LOFF': 5500.5, + 'blocklength': 127, + 'coeff_for_sd': 1737122264.0, + 'distance_from_earth_center': 42164.0, + 'earth_equatorial_radius': 6378.137, + 'earth_polar_radius': 6356.7523, + 'hblock_number': 3, + 'req2_rpol2': 1.006739501, + 'req2_rpol2_req2': 0.0066943844, + 'resampling_size': 4, + 'resampling_types': 0, + 'rpol2_req2': 0.993305616, + 'spare': '', + 'sub_lon': 140.7, +} +FAKE_NAV_INFO: InfoDict = { + 'SSP_longitude': 140.65699999999998, + 'SSP_latitude': 0.0042985719753897015, + 'distance_earth_center_to_satellite': 42165.04, + 'nadir_longitude': 140.25253875463318, + 'nadir_latitude': 0.01674775121155575, +} +FAKE_CAL_INFO: InfoDict = {'blocklength': 0, 'band_number': [4]} +FAKE_IRVISCAL_INFO: InfoDict = {} +FAKE_INTERCAL_INFO: InfoDict = {'blocklength': 0} +FAKE_SEGMENT_INFO: InfoDict = {'blocklength': 0} +FAKE_NAVCORR_INFO: InfoDict = {'blocklength': 0, 'numof_correction_info_data': [1]}
+FAKE_NAVCORR_SUBINFO: InfoDict = {} +FAKE_OBS_TIME_INFO: InfoDict = {'blocklength': 0, 'number_of_observation_times': [1]} +FAKE_OBS_LINETIME_INFO: InfoDict = {} +FAKE_ERROR_INFO: InfoDict = {'blocklength': 0, 'number_of_error_info_data': [1]} +FAKE_ERROR_LINE_INFO: InfoDict = {} +FAKE_SPARE_INFO: InfoDict = {'blocklength': 0} + + +def _new_unzip(fname): + """Fake unzipping.""" + if fname[-3:] == 'bz2': + return fname[:-4] + return fname class TestAHIHSDNavigation(unittest.TestCase): @@ -131,108 +200,43 @@ def test_segment(self, fromfile, np2str): 5500000.035542117, -2200000.0142168473)) -class TestAHIHSDFileHandler(unittest.TestCase): - """Test case for the file reading.""" - - def new_unzip(fname): - """Fake unzipping.""" - if fname[-3:] == 'bz2': - return fname[:-4] - return fname - - @staticmethod - def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None): - if filename_info is None: - filename_info = {'segment': 8, 'total_segments': 10} - if filetype_info is None: - filetype_info = {'file_type': 'hsd_b01'} - fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info) - - # Check that the filename is altered for bz2 format files - assert in_fname != fh.filename - - fh.proj_info = { - 'CFAC': 40932549, - 'COFF': 5500.5, - 'LFAC': 40932549, - 'LOFF': 5500.5, - 'blocklength': 127, - 'coeff_for_sd': 1737122264.0, - 'distance_from_earth_center': 42164.0, - 'earth_equatorial_radius': 6378.137, - 'earth_polar_radius': 6356.7523, - 'hblock_number': 3, - 'req2_rpol2': 1.006739501, - 'req2_rpol2_req2': 0.0066943844, - 'resampling_size': 4, - 'resampling_types': 0, - 'rpol2_req2': 0.993305616, - 'spare': '', - 'sub_lon': 140.7 - } - fh.nav_info = { - 'SSP_longitude': 140.66, - 'SSP_latitude': 0.03, - 'distance_earth_center_to_satellite': 42165.04, - 'nadir_longitude': 140.67, - 'nadir_latitude': 0.04 - } - fh.data_info = { - 'blocklength': 50, - 'compression_flag_for_data': 0, - 'hblock_number': 2, - 'number_of_bits_per_pixel': 16, - 'number_of_columns': 11000, - 'number_of_lines': 1100, - 'spare': '' - } - fh.basic_info = { - 'observation_area': np.array(['FLDK']), - 'observation_start_time': np.array([58413.12523839]), - 'observation_end_time': np.array([58413.12562439]), - 'observation_timeline': np.array([300]), - } - fh.observation_area = fh.basic_info['observation_area'] - return fh +class TestAHIHSDFileHandler: + """Tests for the AHI HSD file handler.""" + + def test_bad_calibration(self): + """Test that a bad calibration mode causes an exception.""" + with pytest.raises(ValueError): + with _fake_hsd_handler(fh_kwargs={"calib_mode": "BAD_MODE"}): + pass + + @pytest.mark.parametrize( + ("round_actual_position", "expected_result"), + [ + (False, (140.65699999999998, 0.0042985719753897015, 35786903.00011936)), + (True, (140.657, 0.0, 35786850.0)) + ] + ) + def test_actual_satellite_position(self, round_actual_position, expected_result): + """Test that rounding of the actual satellite position can be controlled.""" + with _fake_hsd_handler(fh_kwargs={"round_actual_position": round_actual_position}) as fh: + ds_id = make_dataid(name="B01") + ds_info = { + "units": "%", + "standard_name": "some_name", + "wavelength": (0.1, 0.2, 0.3), + } + metadata = fh._get_metadata(ds_id, ds_info) + orb_params = metadata["orbital_parameters"] + assert orb_params["satellite_actual_longitude"] == expected_result[0] + assert orb_params["satellite_actual_latitude"] == expected_result[1] + assert orb_params["satellite_actual_altitude"] == expected_result[2] - 
@mock.patch('satpy.readers.ahi_hsd.np2str') - @mock.patch('satpy.readers.ahi_hsd.np.fromfile') - @mock.patch('satpy.readers.ahi_hsd.unzip_file', - mock.MagicMock(side_effect=new_unzip)) - def setUp(self, fromfile, np2str): - """Create a test file handler.""" - np2str.side_effect = lambda x: x - m = mock.mock_open() - with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): - # Check if file handler raises exception for invalid calibration mode - with self.assertRaises(ValueError): - AHIHSDFileHandler('somefile', - {'segment': 8, 'total_segments': 10}, - filetype_info={'file_type': 'hsd_b01'}, - calib_mode='BAD_MODE') - in_fname = 'test_file.bz2' - self.fh = self._create_fake_file_handler(in_fname) - - def test_time_properties(self): - """Test start/end/scheduled time properties.""" - self.assertEqual(self.fh.start_time, datetime(2018, 10, 22, 3, 0, 20, 596896)) - self.assertEqual(self.fh.end_time, datetime(2018, 10, 22, 3, 0, 53, 947296)) - self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0)) + @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._check_fpos') + def test_read_header(self, *mocks): + """Test header reading.""" + with _fake_hsd_handler() as fh: + fh._read_header(mock.MagicMock()) - def test_scanning_frequencies(self): - """Test scanning frequencies.""" - self.fh.observation_area = 'JP04' - self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 7, 30, 0)) - self.fh.observation_area = 'R304' - self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 7, 30, 0)) - self.fh.observation_area = 'R420' - self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 9, 30, 0)) - self.fh.observation_area = 'R520' - self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 9, 30, 0)) - self.fh.observation_area = 'FLDK' - self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0)) - - @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate') @@ -240,36 +244,48 @@ def test_read_band(self, calibrate, *mocks): """Test masking of space pixels.""" nrows = 25 ncols = 100 - self.fh.data_info['number_of_columns'] = ncols - self.fh.data_info['number_of_lines'] = nrows calibrate.return_value = np.ones((nrows, ncols)) - m = mock.mock_open() - with mock.patch('satpy.readers.ahi_hsd.open', m, create=True): - im = self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock()) + with _fake_hsd_handler() as fh: + fh.data_info['number_of_columns'] = ncols + fh.data_info['number_of_lines'] = nrows + im = fh.read_band(mock.MagicMock(), mock.MagicMock()) # Note: Within the earth's shape get_geostationary_mask() is True but the numpy.ma mask # is False mask = im.to_masked_array().mask - ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute()) - self.assertTrue(np.all(mask == ref_mask)) + ref_mask = np.logical_not(get_geostationary_mask(fh.area).compute()) + np.testing.assert_equal(mask, ref_mask) # Test attributes orb_params_exp = {'projection_longitude': 140.7, 'projection_latitude': 0., 'projection_altitude': 35785863.0, - 'satellite_actual_longitude': 140.66, - 'satellite_actual_latitude': 0.03, - 'nadir_longitude': 140.67, - 'nadir_latitude': 0.04} - self.assertTrue(set(orb_params_exp.items()).issubset(set(im.attrs['orbital_parameters'].items()))) - 
self.assertTrue(np.isclose(im.attrs['orbital_parameters']['satellite_actual_altitude'], 35786903.00581372)) + 'satellite_actual_longitude': 140.657, + 'satellite_actual_latitude': 0.0, + 'satellite_actual_altitude': 35786850, + 'nadir_longitude': 140.252539, + 'nadir_latitude': 0.01674775} + actual_obs_params = im.attrs['orbital_parameters'] + for key, value in orb_params_exp.items(): + assert key in actual_obs_params + np.testing.assert_allclose(value, actual_obs_params[key]) + + time_params_exp = { + 'nominal_start_time': datetime(2018, 10, 22, 3, 0, 0, 0), + 'nominal_end_time': datetime(2018, 10, 22, 3, 0, 0, 0), + 'observation_start_time': datetime(2018, 10, 22, 3, 0, 20, 596896), + 'observation_end_time': datetime(2018, 10, 22, 3, 0, 53, 947296), + } + actual_time_params = im.attrs['time_parameters'] + for key, value in time_params_exp.items(): + assert key in actual_time_params + assert value == actual_time_params[key] # Test if masking space pixels disables with appropriate flag - self.fh.mask_space = False + fh.mask_space = False with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_space') as mask_space: - self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock()) + fh.read_band(mock.MagicMock(), mock.MagicMock()) mask_space.assert_not_called() - @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid') @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate') @@ -279,62 +295,86 @@ def test_scene_loading(self, calibrate, *mocks): nrows = 25 ncols = 100 calibrate.return_value = np.ones((nrows, ncols)) - m = mock.mock_open() - with mock.patch('satpy.readers.ahi_hsd.open', m, create=True), \ - mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler') as fh_cls: - fh_cls.return_value = self.fh - self.fh.filename_info['total_segments'] = 1 - self.fh.filename_info['segment'] = 1 - self.fh.data_info['number_of_columns'] = ncols - self.fh.data_info['number_of_lines'] = nrows - scn = Scene(reader='ahi_hsd', filenames=['HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT']) - scn.load(['B07']) - im = scn['B07'] - - # Make sure space masking worked - mask = im.to_masked_array().mask - ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute()) - self.assertTrue(np.all(mask == ref_mask)) + with _fake_hsd_handler() as fh: + with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler') as fh_cls: + fh_cls.return_value = fh + fh.filename_info['total_segments'] = 1 + fh.filename_info['segment'] = 1 + fh.data_info['number_of_columns'] = ncols + fh.data_info['number_of_lines'] = nrows + scn = Scene(reader='ahi_hsd', filenames=['HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT']) + scn.load(['B07']) + im = scn['B07'] + + # Make sure space masking worked + mask = im.to_masked_array().mask + ref_mask = np.logical_not(get_geostationary_mask(fh.area).compute()) + np.testing.assert_equal(mask, ref_mask) + + def test_time_properties(self): + """Test start/end/scheduled time properties.""" + with _fake_hsd_handler() as fh: + assert fh.start_time == datetime(2018, 10, 22, 3, 0) + assert fh.end_time == datetime(2018, 10, 22, 3, 0) + assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896) + assert fh.observation_end_time == datetime(2018, 10, 22, 3, 0, 53, 947296) + assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) + + def 
test_scanning_frequencies(self): + """Test scanning frequencies.""" + with _fake_hsd_handler() as fh: + fh.observation_area = 'JP04' + assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0) + fh.observation_area = 'R304' + assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0) + fh.observation_area = 'R420' + assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0) + fh.observation_area = 'R520' + assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0) + fh.observation_area = 'FLDK' + assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0) + assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0) def test_blocklen_error(self, *mocks): """Test erroneous blocklength.""" open_name = '%s.open' % __name__ fpos = 50 - with mock.patch(open_name, create=True) as mock_open: - with mock_open(mock.MagicMock(), 'r') as fp_: - # Expected and actual blocklength match - fp_.tell.return_value = 50 - with warnings.catch_warnings(record=True) as w: - self.fh._check_fpos(fp_, fpos, 0, 'header 1') - self.assertTrue(len(w) == 0) - - # Expected and actual blocklength do not match - fp_.tell.return_value = 100 - with warnings.catch_warnings(record=True) as w: - self.fh._check_fpos(fp_, fpos, 0, 'header 1') - self.assertTrue(len(w) > 0) - - @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._check_fpos') - def test_read_header(self, *mocks): - """Test header reading.""" - nhdr = [ - {'blocklength': 0}, - {'blocklength': 0}, - {'blocklength': 0}, - {'blocklength': 0}, - {'blocklength': 0, 'band_number': [4]}, - {'blocklength': 0}, - {'blocklength': 0}, - {'blocklength': 0}, - {'blocklength': 0, 'numof_correction_info_data': [1]}, - {'blocklength': 0}, - {'blocklength': 0, 'number_of_observation_times': [1]}, - {'blocklength': 0}, - {'blocklength': 0, 'number_of_error_info_data': [1]}, - {'blocklength': 0}, - {'blocklength': 0}] - with mock.patch('numpy.fromfile', side_effect=nhdr): - self.fh._read_header(mock.MagicMock()) + with _fake_hsd_handler() as fh, \ + mock.patch(open_name, create=True) as mock_open, \ + mock_open(mock.MagicMock(), 'r') as fp_: + # Expected and actual blocklength match + fp_.tell.return_value = 50 + with warnings.catch_warnings(record=True) as w: + fh._check_fpos(fp_, fpos, 0, 'header 1') + assert len(w) == 0 + + # Expected and actual blocklength do not match + fp_.tell.return_value = 100 + with warnings.catch_warnings(record=True) as w: + fh._check_fpos(fp_, fpos, 0, 'header 1') + assert len(w) > 0 + + def test_is_valid_time(self): + """Test that valid times are correctly identified.""" + assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO['observation_timeline']) + assert not AHIHSDFileHandler._is_valid_timeline('65526') + + def test_time_rounding(self): + """Test rounding of the nominal time.""" + mocker = mock.MagicMock() + in_date = datetime(2020, 1, 1, 12, 0, 0) + + with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline', mocker): + with _fake_hsd_handler() as fh: + mocker.return_value = True + assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0) + mocker.return_value = False + assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0) class
TestAHICalibration(unittest.TestCase): @@ -381,7 +421,7 @@ def test_default_calibrate(self, *mocks): # Radiance rad_exp = np.array([[15.2, 11.5], - [7.8, 0]]) + [7.8, -3.3]]) rad = self.fh.calibrate(data=self.counts, calibration='radiance') self.assertTrue(np.allclose(rad, rad_exp)) @@ -405,7 +445,7 @@ def test_updated_calibrate(self): # Standard operation self.fh.calib_mode = 'UPDATE' rad_exp = np.array([[30.4, 23.0], - [15.6, 0.]]) + [15.6, -6.6]]) rad = self.fh.calibrate(data=self.counts, calibration='radiance') self.assertTrue(np.allclose(rad, rad_exp)) @@ -427,7 +467,7 @@ def test_updated_calibrate(self): } rad = self.fh.calibrate(data=self.counts, calibration='radiance') rad_exp = np.array([[15.2, 11.5], - [7.8, 0]]) + [7.8, -3.3]]) self.assertTrue(np.allclose(rad, rad_exp)) def test_user_calibration(self): @@ -438,7 +478,7 @@ def test_user_calibration(self): self.fh.band_name = 'B13' rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() rad_exp = np.array([[16.10526316, 12.21052632], - [8.31578947, 0.10526316]]) + [8.31578947, -3.36842105]]) self.assertTrue(np.allclose(rad, rad_exp)) # This is for DN calibration @@ -447,7 +487,77 @@ def test_user_calibration(self): 'type': 'DN'} self.fh.band_name = 'B13' rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute() - print(rad) rad_exp = np.array([[15.2, 12.], - [8.8, 0.]]) + [8.8, -0.8]]) self.assertTrue(np.allclose(rad, rad_exp)) + + +@contextlib.contextmanager +def _fake_hsd_handler(fh_kwargs=None): + """Create a test file handler.""" + m = mock.mock_open() + with mock.patch('satpy.readers.ahi_hsd.np.fromfile', _custom_fromfile), \ + mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=_new_unzip)), \ + mock.patch('satpy.readers.ahi_hsd.open', m, create=True): + in_fname = 'test_file.bz2' + fh = _create_fake_file_handler(in_fname, fh_kwargs=fh_kwargs) + yield fh + + +def _custom_fromfile(*args, **kwargs): + from satpy.readers.ahi_hsd import ( + _BASIC_INFO_TYPE, + _CAL_INFO_TYPE, + _DATA_INFO_TYPE, + _ERROR_INFO_TYPE, + _ERROR_LINE_INFO_TYPE, + _INTER_CALIBRATION_INFO_TYPE, + _IRCAL_INFO_TYPE, + _NAV_INFO_TYPE, + _NAVIGATION_CORRECTION_INFO_TYPE, + _NAVIGATION_CORRECTION_SUBINFO_TYPE, + _OBSERVATION_LINE_TIME_INFO_TYPE, + _OBSERVATION_TIME_INFO_TYPE, + _PROJ_INFO_TYPE, + _SEGMENT_INFO_TYPE, + _SPARE_TYPE, + _VISCAL_INFO_TYPE, + ) + dtype = kwargs.get("dtype") + fake_info_map = { + _BASIC_INFO_TYPE: FAKE_BASIC_INFO, + _DATA_INFO_TYPE: FAKE_DATA_INFO, + _NAV_INFO_TYPE: FAKE_NAV_INFO, + _PROJ_INFO_TYPE: FAKE_PROJ_INFO, + _CAL_INFO_TYPE: FAKE_CAL_INFO, + _VISCAL_INFO_TYPE: FAKE_IRVISCAL_INFO, + _IRCAL_INFO_TYPE: FAKE_IRVISCAL_INFO, + _INTER_CALIBRATION_INFO_TYPE: FAKE_INTERCAL_INFO, + _SEGMENT_INFO_TYPE: FAKE_SEGMENT_INFO, + _NAVIGATION_CORRECTION_INFO_TYPE: FAKE_NAVCORR_INFO, + _NAVIGATION_CORRECTION_SUBINFO_TYPE: FAKE_NAVCORR_SUBINFO, + _OBSERVATION_TIME_INFO_TYPE: FAKE_OBS_TIME_INFO, + _OBSERVATION_LINE_TIME_INFO_TYPE: FAKE_OBS_LINETIME_INFO, + _ERROR_INFO_TYPE: FAKE_ERROR_INFO, + _ERROR_LINE_INFO_TYPE: FAKE_ERROR_LINE_INFO, + _SPARE_TYPE: FAKE_SPARE_INFO, + } + info_dict = fake_info_map[dtype] + fake_arr = np.zeros((1,), dtype=dtype) + for key, value in info_dict.items(): + fake_arr[key] = value + return fake_arr + + +def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, fh_kwargs=None): + if filename_info is None: + filename_info = {'segment': 8, 'total_segments': 10} + if filetype_info is None: + filetype_info = {'file_type': 'hsd_b01'} + if 
fh_kwargs is None: + fh_kwargs = {} + fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info, **fh_kwargs) + + # Check that the filename is altered for bz2 format files + assert in_fname != fh.filename + return fh diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py index 36ddaecd6b..fd421d7d95 100644 --- a/satpy/tests/reader_tests/test_cmsaf_claas.py +++ b/satpy/tests/reader_tests/test_cmsaf_claas.py @@ -19,88 +19,89 @@ import datetime import os -from unittest import mock import numpy as np -import numpy.testing import pytest import xarray as xr +from pyresample.geometry import AreaDefinition -from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler - - -class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): - """Class for faking the NetCDF4 Filehandler.""" - - _nrows = 30 - _ncols = 40 - - def __init__(self, *args, auto_maskandscale, **kwargs): - """Init the file handler.""" - # make sure that CLAAS2 reader asks NetCDF4FileHandler for having - # auto_maskandscale enabled - assert auto_maskandscale - super().__init__(*args, **kwargs) - - def _get_global_attributes(self): - data = {} - attrs = { - "CMSAF_proj4_params": "+a=6378169.0 +h=35785831.0 " - "+b=6356583.8 +lon_0=0 +proj=geos", - "CMSAF_area_extent": np.array( - [-5456233.41938636, -5453233.01608472, - 5453233.01608472, 5456233.41938636]), - "time_coverage_start": "1985-08-13T13:15:00Z", - "time_coverage_end": "2085-08-13T13:15:00Z", - } - for (k, v) in attrs.items(): - data["/attr/" + k] = v - return data - - def _get_data(self): - data = { - "cph": xr.DataArray( - np.arange(self._nrows*self._ncols, dtype="i4").reshape( - (1, self._nrows, self._ncols))/100, - dims=("time", "y", "x")), - "ctt": xr.DataArray( - np.arange(self._nrows*self._ncols, 0, -1, - dtype="i4").reshape( - (self._nrows, self._ncols))/100, - dims=("y", "x")), - "time_bnds": xr.DataArray( - [[12436.91666667, 12436.92534722]], - dims=("time", "time_bnds"))} - for k in set(data.keys()): - data[f"{k:s}/dimensions"] = data[k].dims - data[f"{k:s}/attr/fruit"] = "apple" - data[f"{k:s}/attr/scale_factor"] = np.float32(0.01) - return data - - def _get_dimensions(self): - data = { - "/dimension/x": self._nrows, - "/dimension/y": self._ncols, - "/dimension/time": 1, - "/dimension/time_bnds": 2, - } - return data - - def get_test_content(self, filename, filename_info, filetype_info): - """Get the content of the test data.""" - # mock global attributes - # - root groups global - # - other groups global - # mock data variables - # mock dimensions - # - # ... but only what satpy is using ... 
- - D = {} - D.update(self._get_data()) - D.update(self._get_dimensions()) - D.update(self._get_global_attributes()) - return D +from satpy.tests.utils import make_dataid + + +@pytest.fixture( + params=[datetime.datetime(2017, 12, 5), datetime.datetime(2017, 12, 6)] +) +def start_time(request): + """Get start time of the dataset.""" + return request.param + + +@pytest.fixture +def start_time_str(start_time): + """Get string representation of the start time.""" + return start_time.strftime("%Y-%m-%dT%H:%M:%SZ") + + +@pytest.fixture() +def fake_dataset(start_time_str): + """Create a CLAAS-like test dataset.""" + cph = xr.DataArray( + [[[0, 1], [2, 0]]], + dims=("time", "y", "x") + ) + ctt = xr.DataArray( + [[280, 290], [300, 310]], + dims=("y", "x") + ) + time_bounds = xr.DataArray( + [[12436.91666667, 12436.92534722]], + dims=("time", "bndsize") + ) + attrs = { + "CMSAF_proj4_params": "+a=6378169.0 +h=35785831.0 " + "+b=6356583.8 +lon_0=0 +proj=geos", + "CMSAF_area_extent": np.array( + [-5456233.41938636, -5453233.01608472, + 5453233.01608472, 5456233.41938636]), + "time_coverage_start": start_time_str, + "time_coverage_end": "2085-08-13T13:15:00Z", + } + return xr.Dataset( + { + "cph": cph, + "ctt": ctt, + "time_bnds": time_bounds + }, + attrs=attrs + ) + + +@pytest.fixture +def encoding(): + """Dataset encoding.""" + return { + "ctt": {"scale_factor": np.float32(0.01)}, + } + + +@pytest.fixture +def fake_file(fake_dataset, encoding, tmp_path): + """Write a fake dataset to file.""" + filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc" + fake_dataset.to_netcdf(filename, encoding=encoding) + yield filename + + +@pytest.fixture +def fake_files(fake_dataset, encoding, tmp_path): + """Write the same fake dataset into two different files.""" + filenames = [ + tmp_path / "CPPin20140101001500305SVMSG01MD.nc", + tmp_path / "CPPin20140101003000305SVMSG01MD.nc", + ] + for filename in filenames: + fake_dataset.to_netcdf(filename, encoding=encoding) + yield filenames @pytest.fixture @@ -115,20 +116,6 @@ def reader(): return reader -@pytest.fixture(autouse=True, scope="class") -def fake_handler(): - """Wrap NetCDF4 FileHandler with our own fake handler.""" - # implementation strongly inspired by test_viirs_l1b.py - from satpy.readers.cmsaf_claas2 import CLAAS2 - p = mock.patch.object( - CLAAS2, - "__bases__", - (FakeNetCDF4FileHandler2,)) - with p: - p.is_local = True - yield p - - def test_file_pattern(reader): """Test file pattern matching.""" filenames = [ @@ -142,25 +129,105 @@ def test_file_pattern(reader): assert len(files) == 3 -def test_load(reader): - """Test loading.""" - from satpy.tests.utils import make_dataid - - # testing two filenames to test correctly combined - filenames = [ - "CTXin20040120091500305SVMSG01MD.nc", - "CTXin20040120093000305SVMSG01MD.nc"] - - loadables = reader.select_files_from_pathnames(filenames) - reader.create_filehandlers(loadables) - res = reader.load( - [make_dataid(name=name) for name in ["cph", "ctt"]]) - assert 2 == len(res) - assert reader.start_time == datetime.datetime(1985, 8, 13, 13, 15) - assert reader.end_time == datetime.datetime(2085, 8, 13, 13, 15) - np.testing.assert_array_almost_equal( - res["cph"].data, - np.tile(np.arange(0.0, 12.0, 0.01).reshape((30, 40)), [2, 1])) - np.testing.assert_array_almost_equal( - res["ctt"].data, - np.tile(np.arange(12.0, 0.0, -0.01).reshape((30, 40)), [2, 1])) +class TestCLAAS2MultiFile: + """Test reading multiple CLAAS-2 files.""" + + @pytest.fixture + def multi_file_reader(self, reader, fake_files): + 
"""Create a multi-file reader.""" + loadables = reader.select_files_from_pathnames(fake_files) + reader.create_filehandlers(loadables) + return reader + + @pytest.fixture + def multi_file_dataset(self, multi_file_reader): + """Load datasets from multiple files.""" + ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]] + datasets = multi_file_reader.load(ds_ids) + return datasets + + def test_combine_timestamps(self, multi_file_reader, start_time): + """Test combination of timestamps.""" + assert multi_file_reader.start_time == start_time + assert multi_file_reader.end_time == datetime.datetime(2085, 8, 13, 13, 15) + + @pytest.mark.parametrize( + "ds_name,expected", + [ + ("cph", [[0, 1], [2, 0], [0, 1], [2, 0]]), + ("ctt", [[280, 290], [300, 310], [280, 290], [300, 310]]), + ] + ) + def test_combine_datasets(self, multi_file_dataset, ds_name, expected): + """Test combination of datasets.""" + np.testing.assert_array_almost_equal( + multi_file_dataset[ds_name].data, expected + ) + + def test_number_of_datasets(self, multi_file_dataset): + """Test number of datasets.""" + assert 2 == len(multi_file_dataset) + + +class TestCLAAS2SingleFile: + """Test reading a single CLAAS2 file.""" + + @pytest.fixture + def file_handler(self, fake_file): + """Return a CLAAS-2 file handler.""" + from satpy.readers.cmsaf_claas2 import CLAAS2 + return CLAAS2(fake_file, {}, {}) + + @pytest.fixture + def area_extent_exp(self, start_time): + """Get expected area extent.""" + if start_time < datetime.datetime(2017, 12, 6): + return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291) + return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582) + + @pytest.fixture + def area_exp(self, area_extent_exp): + """Get expected area definition.""" + proj_dict = { + "a": 6378169.0, + "b": 6356583.8, + "h": 35785831.0, + "lon_0": 0.0, + "proj": "geos", + "units": "m", + } + return AreaDefinition( + area_id="msg_seviri_fes_3km", + description="MSG SEVIRI Full Earth Scanning service area definition with 3 km resolution", + proj_id="geos", + projection=proj_dict, + area_extent=area_extent_exp, + width=3636, + height=3636, + ) + + def test_get_area_def(self, file_handler, area_exp): + """Test area definition.""" + area = file_handler.get_area_def(make_dataid(name="foo")) + assert area == area_exp + + @pytest.mark.parametrize( + "ds_name,expected", + [ + ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=('y', 'x'))), + ("cph", xr.DataArray([[0, 1], [2, 0]], dims=('y', 'x'))), + ] + ) + def test_get_dataset(self, file_handler, ds_name, expected): + """Test dataset loading.""" + dsid = make_dataid(name=ds_name) + ds = file_handler.get_dataset(dsid, {}) + xr.testing.assert_allclose(ds, expected) + + def test_start_time(self, file_handler, start_time): + """Test start time property.""" + assert file_handler.start_time == start_time + + def test_end_time(self, file_handler): + """Test end time property.""" + assert file_handler.end_time == datetime.datetime(2085, 8, 13, 13, 15) diff --git a/satpy/tests/reader_tests/test_eps_l1b.py b/satpy/tests/reader_tests/test_eps_l1b.py index 2bfd62be8c..2f7bb99f87 100644 --- a/satpy/tests/reader_tests/test_eps_l1b.py +++ b/satpy/tests/reader_tests/test_eps_l1b.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2019 Satpy developers +# Copyright (c) 2019, 2022 Satpy developers # # This file is part of satpy. 
# @@ -119,6 +119,7 @@ def test_dataset(self): assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == '1') assert(res.attrs['calibration'] == 'reflectance') + assert(res.attrs['units'] == '%') did = make_dataid(name='4', calibration='brightness_temperature') res = self.fh.get_dataset(did, {}) @@ -127,6 +128,7 @@ def test_dataset(self): assert(res.attrs['sensor'] == 'avhrr-3') assert(res.attrs['name'] == '4') assert(res.attrs['calibration'] == 'brightness_temperature') + assert(res.attrs['units'] == 'K') def test_navigation(self): """Test the navigation.""" @@ -147,8 +149,7 @@ def test_angles(self): assert(res.attrs['name'] == 'solar_zenith_angle') @mock.patch('satpy.readers.eps_l1b.EPSAVHRRFile.__getitem__') - @mock.patch('satpy.readers.eps_l1b.EPSAVHRRFile.__init__') - def test_get_full_angles_twice(self, mock__init__, mock__getitem__): + def test_get_full_angles_twice(self, mock__getitem__): """Test get full angles twice.""" geotiemock = mock.Mock() metop20kmto1km = geotiemock.metop20kmto1km @@ -160,9 +161,13 @@ def mock_getitem(key): "ANGULAR_RELATIONS_LAST": np.zeros((7, 4)), "NAV_SAMPLE_RATE": 20} return data[key] - mock__init__.return_value = None mock__getitem__.side_effect = mock_getitem - avhrr_reader = satpy.readers.eps_l1b.EPSAVHRRFile() + + avhrr_reader = satpy.readers.eps_l1b.EPSAVHRRFile( + filename="foo", + filename_info={"start_time": "foo", "end_time": "bar"}, + filetype_info={"foo": "bar"} + ) avhrr_reader.scanlines = 7 avhrr_reader.pixels = 2048 diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py index 78ace51fe5..d5490421a6 100644 --- a/satpy/tests/reader_tests/test_eum_base.py +++ b/satpy/tests/reader_tests/test_eum_base.py @@ -30,6 +30,7 @@ time_cds_short, timecds2datetime, ) +from satpy.readers.seviri_base import mpef_product_header class TestMakeTimeCdsDictionary(unittest.TestCase): @@ -77,9 +78,10 @@ def test_fun(self): class TestRecarray2Dict(unittest.TestCase): """Test TestRecarray2Dict.""" - def test_fun(self): + def test_timestamps(self): """Test function for TestRecarray2Dict.""" # datatype definition + pat_dt = np.dtype([ ('TrueRepeatCycleStart', time_cds_expanded), ('PlanForwardScanEnd', time_cds_expanded), @@ -101,6 +103,22 @@ def test_fun(self): self.assertEqual(recarray2dict(pat), expected) + def test_mpef_product_header(self): + """Test function for TestRecarray2Dict and mpef product header.""" + names = ['ImageLocation', 'GsicsCalMode', 'GsicsCalValidity', + 'Padding', 'OffsetToData', 'Padding2'] + mpef_header = np.dtype([(name, mpef_product_header.fields[name][0]) + for name in names]) + mph_struct = np.array([('OPE', True, False, 'XX', 1000, '12345678')], dtype=mpef_header) + test_mph = {'ImageLocation': "OPE", + 'GsicsCalMode': True, + 'GsicsCalValidity': False, + 'Padding': 'XX', + 'OffsetToData': 1000, + 'Padding2': '12345678' + } + self.assertEqual(recarray2dict(mph_struct), test_mph) + class TestGetServiceMode(unittest.TestCase): """Test the get_service_mode function.""" diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index ea304200d9..a10c39fe64 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -62,6 +62,7 @@ def _get_test_content_for_channel(self, pat, ch): rad = meas + "/effective_radiance" qual = meas + "/pixel_quality" index_map = meas + "/index_map" + rad_conv_coeff = meas + "/radiance_unit_conversion_coefficient" pos = meas + "/{:s}_position_{:s}" shp = rad + "/shape" 
x = meas + "/x" @@ -122,11 +123,12 @@ def _get_test_content_for_channel(self, pat, ch): data[qual.format(ch_str)] = xrda( da.arange(nrows * ncols, dtype="uint8").reshape(nrows, ncols) % 128, dims=("y", "x")) - # add dummy data for index map starting from 1 + # add dummy data for index map starting from 100 data[index_map.format(ch_str)] = xrda( - (da.arange(nrows * ncols, dtype="uint16").reshape(nrows, ncols) % 6000) + 1, + (da.arange(nrows * ncols, dtype="uint16").reshape(nrows, ncols) % 6000) + 100, dims=("y", "x")) + data[rad_conv_coeff.format(ch_str)] = xrda(1234.56) data[pos.format(ch_str, "start", "row")] = xrda(0) data[pos.format(ch_str, "start", "column")] = xrda(0) data[pos.format(ch_str, "end", "row")] = xrda(nrows) @@ -191,6 +193,7 @@ def _get_test_content_aux_data(self): # compute the last data entry to simulate the FCI caching data[list(AUX_DATA.values())[-1]] = data[list(AUX_DATA.values())[-1]].compute() + data['index'] = xrda(da.arange(indices_dim, dtype="uint16")+100, dims=("index")) return data def _get_global_attributes(self): @@ -240,6 +243,29 @@ def _get_test_calib_for_channel_vis(self, chroot, meas): return data +class FakeNetCDF4FileHandler4(FakeNetCDF4FileHandler2): + """Mock bad data for IDPF TO-DO's.""" + + def _get_test_calib_for_channel_vis(self, chroot, meas): + data = super()._get_test_calib_for_channel_vis(chroot, meas) + data["state/celestial/earth_sun_distance"] = xr.DataArray(da.repeat(da.array([30000000]), 6000)) + return data + + def _get_test_content_all_channels(self): + data = super()._get_test_content_all_channels() + data['data/vis_04/measured/x'].attrs['scale_factor'] *= -1 + data['data/vis_04/measured/x'].attrs['scale_factor'] = \ + np.float32(data['data/vis_04/measured/x'].attrs['scale_factor']) + data['data/vis_04/measured/x'].attrs['add_offset'] = \ + np.float32(data['data/vis_04/measured/x'].attrs['add_offset']) + data['data/vis_04/measured/y'].attrs['scale_factor'] = \ + np.float32(data['data/vis_04/measured/y'].attrs['scale_factor']) + data['data/vis_04/measured/y'].attrs['add_offset'] = \ + np.float32(data['data/vis_04/measured/y'].attrs['add_offset']) + + return data + + @pytest.fixture def reader_configs(): """Return reader configs for FCI.""" @@ -337,7 +363,7 @@ def test_load_counts(self, reader_configs): assert res[ch].shape == (200 * 2, 11136) assert res[ch].dtype == np.uint16 assert res[ch].attrs["calibration"] == "counts" - assert res[ch].attrs["units"] == "1" + assert res[ch].attrs["units"] == "count" if ch == 'ir_38': numpy.testing.assert_array_equal(res[ch][~0], 1) numpy.testing.assert_array_equal(res[ch][0], 5000) @@ -363,7 +389,8 @@ def test_load_radiance(self, reader_configs): assert res[ch].shape == (200, 11136) assert res[ch].dtype == np.float64 assert res[ch].attrs["calibration"] == "radiance" - assert res[ch].attrs["units"] == 'mW.m-2.sr-1.(cm-1)-1' + assert res[ch].attrs["units"] == 'mW m-2 sr-1 (cm-1)-1' + assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56 if ch == 'ir_38': numpy.testing.assert_array_equal(res[ch][~0], 15) numpy.testing.assert_array_equal(res[ch][0], 9700) @@ -435,7 +462,7 @@ def test_load_index_map(self, reader_configs): assert 16 == len(res) for ch in self._chans["solar"] + self._chans["terran"]: assert res[ch + '_index_map'].shape == (200, 11136) - numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 5138) + numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 5237) def test_load_aux_data(self, reader_configs): """Test loading of auxiliary data.""" @@ -589,3 
+616,41 @@ def test_handling_bad_data_vis(self, reader_configs, caplog): name="vis_04", calibration="reflectance")], pad_data=False) assert "cannot produce reflectance" in caplog.text + + +class TestFCIL1cNCReaderBadDataFromIDPF(TestFCIL1cNCReader): + """Test the FCI L1c NetCDF Reader for bad data input.""" + + _alt_handler = FakeNetCDF4FileHandler4 + + def test_handling_bad_earthsun_distance(self, reader_configs, caplog): + """Test handling of bad earth-sun distance data.""" + from satpy.tests.utils import make_dataid + + filenames = [ + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc", + ] + + reader = _get_reader_with_filehandlers(filenames, reader_configs) + + res = reader.load([make_dataid(name="vis_04", calibration="reflectance")], pad_data=False) + numpy.testing.assert_array_almost_equal(res["vis_04"], 100 * 15 * 1 * np.pi / 50) + + def test_bad_xy_coords(self, reader_configs): + """Test that the geolocation computation is correct.""" + filenames = [ + "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--" + "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_" + "20170410113925_20170410113934_N__C_0070_0067.nc", + ] + + reader = _get_reader_with_filehandlers(filenames, reader_configs) + res = reader.load(['vis_04'], pad_data=False) + + area_def = res['vis_04'].attrs['area'] + # test area extents computation + np.testing.assert_array_almost_equal(np.array(area_def.area_extent), + np.array([-5568062.270889, 5168057.806632, + 16704186.298937, 5568062.270889])) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index e8b45f15a3..9ebbdb32e7 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -18,7 +18,6 @@ """The fci_cld_l2_nc reader tests package.""" -import datetime import os import unittest import uuid @@ -26,9 +25,32 @@ from unittest import mock import numpy as np +import pytest from netCDF4 import Dataset - -from satpy.readers.fci_l2_nc import PRODUCT_DATA_DURATION_MINUTES, FciL2NCFileHandler, FciL2NCSegmentFileHandler +from pyresample import geometry + +from satpy.readers.fci_l2_nc import FciL2NCFileHandler, FciL2NCSegmentFileHandler +from satpy.tests.utils import make_dataid + +AREA_DEF = geometry.AreaDefinition( + 'mtg_fci_fdss_2km', + 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution', + "", + {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + 5568, + 5568, + (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) +) + +SEG_AREA_DEF = geometry.AreaDefinition( + 'mtg_fci_fdss_32km', + 'MTG FCI Full Disk Scanning Service area definition with 32 km resolution', + "", + {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'}, + 348, + 348, + (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942) +) class TestFciL2NCFileHandler(unittest.TestCase): @@ -48,8 +70,6 @@ def setUp(self): # add global attributes nc.data_source = 'test_data_source' nc.platform = 'test_platform' - nc.time_coverage_start = '20170920173040' - nc.time_coverage_end = '20170920174117' # Add datasets x = nc.createVariable('x', np.float32, dimensions=('number_of_columns',)) @@ -57,9 +77,12 @@ x[:] = np.arange(10) y = nc.createVariable('y', np.float32, dimensions=('number_of_rows',)) - x.standard_name = 'projection_y_coordinate' + y.standard_name = 'projection_y_coordinate' y[:] =
np.arange(100) + s = nc.createVariable('product_quality', np.int8) + s[:] = 99. + one_layer_dataset = nc.createVariable('test_one_layer', np.float32, dimensions=('number_of_rows', 'number_of_columns')) one_layer_dataset[:] = np.ones((100, 10)) @@ -74,124 +97,124 @@ def setUp(self): two_layers_dataset[1, :, :] = 2 * np.ones((100, 10)) mtg_geos_projection = nc.createVariable('mtg_geos_projection', int, dimensions=()) - mtg_geos_projection.longitude_of_projection_origin = 10.0 + mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. - mtg_geos_projection.semi_minor_axis = 6356752. + mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. - self.reader = FciL2NCFileHandler( - filename=self.test_file, - filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=20, - hour=12, minute=30, second=30), - }, - filetype_info={} - ) + self.fh = FciL2NCFileHandler(filename=self.test_file, filename_info={}, filetype_info={}) def tearDown(self): """Remove the previously created test file.""" - # First delete the reader, forcing the file to be closed if still open - del self.reader + # First delete the file handler, forcing the file to be closed if still open + del self.fh # Then we can safely remove the file from the system with suppress(OSError): os.remove(self.test_file) def test_all_basic(self): """Test all basic functionalities.""" - self.assertEqual(PRODUCT_DATA_DURATION_MINUTES, 20) + self.assertEqual(self.fh.spacecraft_name, 'test_platform') + self.assertEqual(self.fh.sensor_name, 'test_data_source') + self.assertEqual(self.fh.ssp_lon, 0.0) - self.assertEqual(self.reader._start_time, - datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=30, second=40)) - - self.assertEqual(self.reader._end_time, - datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=41, second=17)) - - self.assertEqual(self.reader._spacecraft_name, 'test_platform') - self.assertEqual(self.reader._sensor_name, 'test_data_source') - self.assertEqual(self.reader.ssp_lon, 10.0) - - global_attributes = self.reader._get_global_attributes() + global_attributes = self.fh._get_global_attributes() expected_global_attributes = { 'filename': self.test_file, - 'start_time': datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=30, second=40), - 'end_time': datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=41, second=17), 'spacecraft_name': 'test_platform', - 'ssp_lon': 10.0, + 'ssp_lon': 0.0, 'sensor': 'test_data_source', - 'creation_time': datetime.datetime(year=2017, month=9, day=20, - hour=12, minute=30, second=30), 'platform_name': 'test_platform' } self.assertEqual(global_attributes, expected_global_attributes) - @mock.patch('satpy.readers.fci_l2_nc.get_area_definition') + @mock.patch('satpy.readers.fci_l2_nc.geometry.AreaDefinition') @mock.patch('satpy.readers.fci_l2_nc.make_ext') def test_area_definition(self, me_, gad_): """Test the area definition computation.""" - self.reader._compute_area_def() + self.fh._compute_area_def(make_dataid(name='test_area_def', resolution=2000)) # Asserts that the make_ext function was called with the correct arguments me_.assert_called_once() - name, args, kwargs = me_.mock_calls[0] - self.assertTrue(np.allclose(args[0], 0.0)) - self.assertTrue(np.allclose(args[1], 515.6620)) - self.assertTrue(np.allclose(args[2], 0.0)) - self.assertTrue(np.allclose(args[3], 5672.28217)) - self.assertTrue(np.allclose(args[4], 35786400.)) - - p_dict = { - 
'nlines': 100, - 'ncols': 10, - 'ssp_lon': 10.0, - 'a': 6378137., - 'b': 6356752., - 'h': 35786400., - 'a_name': 'FCI Area', - 'a_desc': 'Area for FCI instrument', - 'p_id': 'geos' - } + args, kwargs = me_.call_args + np.testing.assert_allclose(args, [-0.0, -515.6620, 5672.28217, 0.0, 35786400.]) + + proj_dict = {'a': 6378137., + 'lon_0': 0.0, + 'h': 35786400, + "rf": 298.257223563, + 'proj': 'geos', + 'units': 'm', + 'sweep': 'y'} # Asserts that the get_area_definition function was called with the correct arguments gad_.assert_called_once() - name, args, kwargs = gad_.mock_calls[0] - self.assertEqual(args[0], p_dict) - # The second argument must be the return result of the make_ext function - self.assertEqual(args[1]._extract_mock_name(), 'make_ext()') + args, kwargs = gad_.call_args + self.assertEqual(args[0], 'mtg_fci_fdss_2km') + self.assertEqual(args[1], 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution') + self.assertEqual(args[2], '') + self.assertEqual(args[3], proj_dict) + self.assertEqual(args[4], 10) + self.assertEqual(args[5], 100) def test_dataset(self): - """Test the execution of the get_dataset function.""" - # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.reader.get_dataset(None, - {'file_key': 'test_one_layer', - 'fill_value': -999, 'mask_value': 0., - 'file_type': 'test_file_type'}) - - self.assertTrue(np.allclose(dataset.values, np.ones((100, 10)))) + """Test the correct execution of the get_dataset function with a valid file_key.""" + dataset = self.fh.get_dataset(make_dataid(name='test_one_layer', resolution=2000), + {'name': 'test_one_layer', + 'file_key': 'test_one_layer', + 'fill_value': -999, + 'file_type': 'test_file_type'}) + + np.testing.assert_allclose(dataset.values, np.ones((100, 10))) self.assertEqual(dataset.attrs['test_attr'], 'attr') self.assertEqual(dataset.attrs['units'], 'test_units') self.assertEqual(dataset.attrs['fill_value'], -999) - # Checks the correct execution of the get_dataset function with a valid file_key & layer - dataset = self.reader.get_dataset(None, - {'file_key': 'test_two_layers', 'layer': 1, - 'fill_value': -999, 'mask_value': 0, - 'file_type': 'test_file_type'}) - self.assertTrue(np.allclose(dataset.values, 2 * np.ones((100, 10)))) + def test_dataset_with_layer(self): + """Check the correct execution of the get_dataset function with a valid file_key & layer.""" + dataset = self.fh.get_dataset(make_dataid(name='test_two_layers', resolution=2000), + {'name': 'test_two_layers', + 'file_key': 'test_two_layers', 'layer': 1, + 'fill_value': -999, + 'file_type': 'test_file_type'}) + np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10))) self.assertEqual(dataset.attrs['units'], None) self.assertEqual(dataset.attrs['spacecraft_name'], 'test_platform') - # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.reader.get_dataset(None, - {'file_key': 'test_invalid', - 'fill_value': -999, 'mask_value': 0, - 'file_type': 'test_file_type'}) - # Checks that the function returns None + def test_dataset_with_invalid_filekey(self): + """Test the correct execution of the get_dataset function with an invalid file_key.""" + invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=2000), + {'name': 'test_invalid', + 'file_key': 'test_invalid', + 'fill_value': -999, + 'file_type': 'test_file_type'}) self.assertEqual(invalid_dataset, None) + def test_dataset_with_total_cot(self): + """Test the 
correct execution of the get_dataset function for total COT (add contributions from two layers).""" + dataset = self.fh.get_dataset(make_dataid(name='retrieved_cloud_optical_thickness', resolution=2000), + {'name': 'retrieved_cloud_optical_thickness', + 'file_key': 'test_two_layers', + 'fill_value': -999, + 'file_type': 'test_file_type'}) + # Checks that the total COT is the logarithmic sum of the two layers + expected_sum = np.empty((100, 10)) + expected_sum[:] = np.log10(10**2 + 10**1) + np.testing.assert_allclose(dataset.values, expected_sum) + + def test_dataset_with_scalar(self): + """Test the execution of the get_dataset function for scalar values.""" + # Checks returned scalar value + dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), + {'name': 'product_quality', + 'file_key': 'product_quality', + 'file_type': 'test_file_type'}) + self.assertEqual(dataset.values, 99.) + + # Checks that no AreaDefinition is implemented for scalar values + with pytest.raises(NotImplementedError): + self.fh.get_area_def(None) + class TestFciL2NCSegmentFileHandler(unittest.TestCase): """Test the FciL2NCSegmentFileHandler reader.""" @@ -202,25 +225,26 @@ def setUp(self): self.seg_test_file = str(uuid.uuid4()) + ".nc" with Dataset(self.seg_test_file, 'w') as nc: # Create dimensions - nc.createDimension('number_of_FoR_cols', 10) - nc.createDimension('number_of_FoR_rows', 100) + nc.createDimension('number_of_FoR_cols', 348) + nc.createDimension('number_of_FoR_rows', 348) nc.createDimension('number_of_channels', 8) nc.createDimension('number_of_categories', 6) # add global attributes nc.data_source = 'test_fci_data_source' nc.platform = 'test_fci_platform' - nc.time_coverage_start = '20170920173040' - nc.time_coverage_end = '20170920174117' # Add datasets x = nc.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',)) x.standard_name = 'projection_x_coordinate' - x[:] = np.arange(10) + x[:] = np.arange(348) y = nc.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',)) - x.standard_name = 'projection_y_coordinate' + y.standard_name = 'projection_y_coordinate' - y[:] = np.arange(100) + y[:] = np.arange(348) + + s = nc.createVariable('product_quality', np.int8) + s[:] = 99.
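+ # product_quality is deliberately a scalar variable; the scalar-dataset tests below read it back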
chans = nc.createVariable('channels', np.float32, dimensions=('number_of_channels',)) chans.standard_name = 'fci_channels' @@ -233,152 +257,180 @@ def setUp(self): test_dataset = nc.createVariable('test_values', np.float32, dimensions=('number_of_FoR_rows', 'number_of_FoR_cols', 'number_of_channels', 'number_of_categories')) - test_dataset[:] = np.ones((100, 10, 8, 6)) + + test_dataset[:] = self._get_unique_array(range(8), range(6)) test_dataset.test_attr = 'attr' test_dataset.units = 'test_units' - self.segment_reader = FciL2NCSegmentFileHandler( - filename=self.seg_test_file, - filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=20, - hour=12, minute=30, second=30), - }, - filetype_info={} - ) - def tearDown(self): """Remove the previously created test file.""" - # First delete the reader, forcing the file to be closed if still open - del self.segment_reader + # First delete the fh, forcing the file to be closed if still open + del self.fh # Then can safely remove it from the system with suppress(OSError): os.remove(self.seg_test_file) def test_all_basic(self): """Test all basic functionalities.""" - self.assertEqual(PRODUCT_DATA_DURATION_MINUTES, 20) + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) - self.assertEqual(self.segment_reader._start_time, - datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=30, second=40)) + assert self.fh.spacecraft_name == 'test_fci_platform' + assert self.fh.sensor_name == 'test_fci_data_source' + assert self.fh.ssp_lon == 0.0 - self.assertEqual(self.segment_reader._end_time, - datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=41, second=17)) - - self.assertEqual(self.segment_reader._spacecraft_name, 'test_fci_platform') - self.assertEqual(self.segment_reader._sensor_name, 'test_fci_data_source') - self.assertEqual(self.segment_reader.ssp_lon, 0.0) - - global_attributes = self.segment_reader._get_global_attributes() + global_attributes = self.fh._get_global_attributes() expected_global_attributes = { 'filename': self.seg_test_file, - 'start_time': datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=30, second=40), - 'end_time': datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=41, second=17), 'spacecraft_name': 'test_fci_platform', 'ssp_lon': 0.0, 'sensor': 'test_fci_data_source', - 'creation_time': datetime.datetime(year=2017, month=9, day=20, - hour=12, minute=30, second=30), 'platform_name': 'test_fci_platform' } self.assertEqual(global_attributes, expected_global_attributes) def test_dataset(self): - """Test the execution of the get_dataset function.""" + """Test the correct execution of the get_dataset function with valid file_key.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + # Checks the correct execution of the get_dataset function with a valid file_key - dataset = self.segment_reader.get_dataset(None, - {'file_key': 'test_values', - 'fill_value': -999, 'mask_value': 0, }) - self.assertTrue(np.allclose(dataset.values, np.ones((100, 10, 8, 6)))) + dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), + {'name': 'test_values', + 'file_key': 'test_values', + 'fill_value': -999, }) + expected_dataset = self._get_unique_array(range(8), range(6)) + np.testing.assert_allclose(dataset.values, expected_dataset) self.assertEqual(dataset.attrs['test_attr'], 'attr') self.assertEqual(dataset.attrs['units'], 'test_units') 
self.assertEqual(dataset.attrs['fill_value'], -999) + # Checks that no AreaDefinition is implemented + with pytest.raises(NotImplementedError): + self.fh.get_area_def(None) + + def test_dataset_with_invalid_filekey(self): + """Test the correct execution of the get_dataset function with an invalid file_key.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + # Checks the correct execution of the get_dataset function with an invalid file_key - invalid_dataset = self.segment_reader.get_dataset(None, - {'file_key': 'test_invalid', - 'fill_value': -999, 'mask_value': 0}) + invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=32000), + {'name': 'test_invalid', + 'file_key': 'test_invalid', + 'fill_value': -999, }) # Checks that the function returns None self.assertEqual(invalid_dataset, None) + def test_dataset_with_adef(self): + """Test the correct execution of the get_dataset function with `with_area_definition=True`.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, + with_area_definition=True) -class TestFciL2NCErrorFileHandler(unittest.TestCase): - """Test the FciL2NCFileHandler reader.""" - - def setUp(self): - """Set up the test by creating a test file and opening it with the reader.""" - # Easiest way to test the reader is to create a test netCDF file on the fly - self.test_error_file = str(uuid.uuid4()) + ".nc" - with Dataset(self.test_error_file, 'w') as nc_err: - # Create dimensions - nc_err.createDimension('number_of_FoR_cols', 10) - nc_err.createDimension('number_of_FoR_rows', 100) - nc_err.createDimension('number_of_channels', 8) - nc_err.createDimension('number_of_categories', 6) - # add erroneous global attributes - nc_err.data_source = 'test_fci_data_source' # Error in key name - nc_err.platform_err = 'test_fci_platform' # Error in key name - nc_err.time_coverage_start = '2017092017304000' # Error in time format - nc_err.time_coverage_end_err = '20170920174117' # Error in key name - - # Add datasets - x = nc_err.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',)) - x.standard_name = 'projection_x_coordinate' - x[:] = np.arange(10) - - y = nc_err.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',)) - x.standard_name = 'projection_y_coordinate' - y[:] = np.arange(100) - - chans = nc_err.createVariable('channels', np.float32, dimensions=('number_of_channels',)) - chans.standard_name = 'fci_channels' - chans[:] = np.arange(8) - - cats = nc_err.createVariable('categories', np.float32, dimensions=('number_of_categories',)) - cats.standard_name = 'product_categories' - cats[:] = np.arange(6) - - test_dataset = nc_err.createVariable('test_values', np.float32, - dimensions=('number_of_FoR_rows', 'number_of_FoR_cols', - 'number_of_channels', 'number_of_categories')) - test_dataset[:] = np.ones((100, 10, 8, 6)) - test_dataset.test_attr = 'attr' - test_dataset.units = 'test_units' - - self.error_reader = FciL2NCSegmentFileHandler( - filename=self.test_error_file, - filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=20, - hour=12, minute=30, second=30), - }, - filetype_info={} - ) - - def tearDown(self): - """Remove the previously created test file.""" - # First delete the reader, forcing the file to be closed if still open - del self.error_reader - # Then can safely remove it from the system - with suppress(OSError): - os.remove(self.test_error_file) - - def test_errors(self): - """Test that
certain properties cause errors.""" - self.assertRaises(TypeError, self.error_reader._start_time, - datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=30, second=40)) - - self.assertRaises(TypeError, self.error_reader._end_time, - datetime.datetime(year=2017, month=9, day=20, - hour=17, minute=41, second=17)) - - self.assertRaises(TypeError, self.error_reader._spacecraft_name) + # Checks the correct execution of the get_dataset function with a valid file_key + dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), + {'name': 'test_values', + 'file_key': 'test_values', + 'fill_value': -999, + 'coordinates': ('test_lon', 'test_lat'), }) + expected_dataset = self._get_unique_array(range(8), range(6)) + np.testing.assert_allclose(dataset.values, expected_dataset) + self.assertEqual(dataset.attrs['test_attr'], 'attr') + self.assertEqual(dataset.attrs['units'], 'test_units') + self.assertEqual(dataset.attrs['fill_value'], -999) - self.assertRaises(TypeError, self.error_reader._sensor_name) + # Checks returned AreaDefinition against reference + adef = self.fh.get_area_def(None) + self.assertEqual(adef, SEG_AREA_DEF) + + def test_dataset_with_adef_and_wrong_dims(self): + """Test the correct execution of the get_dataset function with dims that don't match expected AreaDefinition.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}, + with_area_definition=True) + with pytest.raises(NotImplementedError): + self.fh.get_dataset(make_dataid(name='test_wrong_dims', resolution=6000), + {'name': 'test_wrong_dims', 'file_key': 'test_values', 'fill_value': -999} + ) + + def test_dataset_with_scalar(self): + """Test the execution of the get_dataset function for scalar values.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + # Checks returned scalar value + dataset = self.fh.get_dataset(make_dataid(name='test_scalar'), + {'name': 'product_quality', + 'file_key': 'product_quality', + 'file_type': 'test_file_type'}) + self.assertEqual(dataset.values, 99.)
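+ # (setUp stores product_quality as a scalar np.int8, so 99 comes back unscaled)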
+ + # Checks that no AreaDefinition is implemented for scalar values + with pytest.raises(NotImplementedError): + self.fh.get_area_def(None) + + def test_dataset_slicing_catid(self): + """Test the correct execution of the _slice_dataset function with 'category_id' set.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + + dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), + {'name': 'test_values', + 'file_key': 'test_values', + 'fill_value': -999, + 'category_id': 5}) + expected_dataset = self._get_unique_array(range(8), 5) + np.testing.assert_allclose(dataset.values, expected_dataset) + + def test_dataset_slicing_chid_catid(self): + """Test the correct execution of the _slice_dataset function with 'channel_id' and 'category_id' set.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + + dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), + {'name': 'test_values', + 'file_key': 'test_values', + 'fill_value': -999, + 'channel_id': 0, 'category_id': 1}) + expected_dataset = self._get_unique_array(0, 1) + np.testing.assert_allclose(dataset.values, expected_dataset) + + def test_dataset_slicing_visid_catid(self): + """Test the correct execution of the _slice_dataset function with 'vis_channel_id' and 'category_id' set.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + + self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_vis_channels'}) + dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), + {'name': 'test_values', + 'file_key': 'test_values', + 'fill_value': -999, + 'vis_channel_id': 3, 'category_id': 3}) + expected_dataset = self._get_unique_array(3, 3) + np.testing.assert_allclose(dataset.values, expected_dataset) + + def test_dataset_slicing_irid(self): + """Test the correct execution of the _slice_dataset function with 'ir_channel_id' set.""" + self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={}) + + self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_ir_channels'}) + dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000), + {'name': 'test_values', + 'file_key': 'test_values', + 'fill_value': -999, + 'ir_channel_id': 4}) + expected_dataset = self._get_unique_array(4, range(6)) + np.testing.assert_allclose(dataset.values, expected_dataset) + + @staticmethod + def _get_unique_array(iarr, jarr): + if not hasattr(iarr, '__iter__'): + iarr = [iarr] + + if not hasattr(jarr, '__iter__'): + jarr = [jarr] + + array = np.zeros((348, 348, 8, 6)) + for i in iarr: + for j in jarr: + array[:, :, i, j] = (i * 10) + j + + array = array[:, :, list(iarr), :] + array = array[:, :, :, list(jarr)] + + return np.squeeze(array) class TestFciL2NCReadingByteData(unittest.TestCase): @@ -393,6 +445,10 @@ def setUp(self): nc_byte.createDimension('number_of_columns', 1) nc_byte.createDimension('number_of_rows', 1) + # add global attributes + nc_byte.data_source = 'test_data_source' + nc_byte.platform = 'test_platform' + # Add datasets x = nc_byte.createVariable('x', np.float32, dimensions=('number_of_columns',)) x.standard_name = 'projection_x_coordinate' x[:] = np.arange(1) @@ -403,9 +459,9 @@ y[:] = np.arange(1) mtg_geos_projection = nc_byte.createVariable('mtg_geos_projection', int, dimensions=()) - mtg_geos_projection.longitude_of_projection_origin = 10.0 +
mtg_geos_projection.longitude_of_projection_origin = 0.0 mtg_geos_projection.semi_major_axis = 6378137. - mtg_geos_projection.semi_minor_axis = 6356752. + mtg_geos_projection.inverse_flattening = 298.257223563 mtg_geos_projection.perspective_point_height = 35786400. test_dataset = nc_byte.createVariable('cloud_mask_test_flag', np.float32, @@ -416,16 +472,13 @@ self.byte_reader = FciL2NCFileHandler( filename=self.test_byte_file, - filename_info={ - 'creation_time': datetime.datetime(year=2017, month=9, day=20, - hour=12, minute=30, second=30), - }, + filename_info={}, filetype_info={} ) def tearDown(self): """Remove the previously created test file.""" - # First delete the reader, forcing the file to be closed if still open + # First delete the file handler, forcing the file to be closed if still open del self.byte_reader # Then can safely remove it from the system with suppress(OSError): @@ -434,9 +487,10 @@ def tearDown(self): def test_byte_extraction(self): """Test the execution of the get_dataset function.""" # Value of 1 is expected to be returned for this test - dataset = self.byte_reader.get_dataset(None, - {'file_key': 'cloud_mask_test_flag', - 'fill_value': -999, 'mask_value': 0., + dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000), + {'name': 'cloud_mask_test_flag', + 'file_key': 'cloud_mask_test_flag', + 'fill_value': -999, 'file_type': 'nc_fci_test_clm', 'extract_byte': 1, }) @@ -444,8 +498,9 @@ self.assertEqual(dataset.values, 1) # Value of 0 is expected to be returned for this test - dataset = self.byte_reader.get_dataset(None, - {'file_key': 'cloud_mask_test_flag', + dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000), + {'name': 'cloud_mask_test_flag', + 'file_key': 'cloud_mask_test_flag', 'fill_value': -999, 'mask_value': 0., 'file_type': 'nc_fci_test_clm', 'extract_byte': 23, diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index a29b3a4e12..ecab67b08c 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -42,7 +42,7 @@ def setUp(self): # Create area definition pcs_id = 'ETRS89 / LAEA Europe' - proj4_dict = {'init': 'epsg:3035'} + proj4_dict = "EPSG:3035" self.x_size = 100 self.y_size = 100 area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py new file mode 100644 index 0000000000..e33cec467a --- /dev/null +++ b/satpy/tests/reader_tests/test_ghrsst_l2.py @@ -0,0 +1,152 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2018, 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.ghrsst_l2 module.""" + +import os +import tarfile +from datetime import datetime +from pathlib import Path + +import numpy as np +import pytest +import xarray as xr + +from satpy.readers.ghrsst_l2 import GHRSSTL2FileHandler + + +class TestGHRSSTL2Reader: + """Test Sentinel-3 SST L2 reader.""" + + def setup_method(self, tmp_path): + """Create a fake osisaf ghrsst dataset.""" + self.base_data = np.array(([-32768, 1135, 1125], [1138, 1128, 1080])) + self.lon_data = np.array(([-13.43, 1.56, 11.25], [-11.38, 1.28, 10.80])) + self.lat_data = np.array(([43.43, 55.56, 61.25], [41.38, 50.28, 60.80])) + self.lon = xr.DataArray( + self.lon_data, + dims=('nj', 'ni'), + attrs={'standard_name': 'longitude', + 'units': 'degrees_east', + } + ) + self.lat = xr.DataArray( + self.lat_data, + dims=('nj', 'ni'), + attrs={'standard_name': 'latitude', + 'units': 'degrees_north', + } + ) + self.sst = xr.DataArray( + self.base_data, + dims=('nj', 'ni'), + attrs={'scale_factor': 0.01, 'add_offset': 273.15, + '_FillValue': -32768, 'units': 'kelvin', + } + ) + self.fake_dataset = xr.Dataset( + data_vars={ + 'sea_surface_temperature': self.sst, + 'longitude': self.lon, + 'latitude': self.lat, + }, + attrs={ + "start_time": "20220321T112640Z", + "stop_time": "20220321T145711Z", + "platform": 'NOAA20', + "sensor": "VIIRS", + }, + ) + + def _create_tarfile_with_testdata(self, mypath): + """Create a 'fake' testdata set in a tar file.""" + slstr_fakename = "S3A_SL_2_WST_MAR_O_NR_003.SEN3" + tarfile_fakename = "S3A_SL_2_WST_MAR_O_NR_003.SEN3.tar" + + slstrdir = mypath / slstr_fakename + slstrdir.mkdir(parents=True, exist_ok=True) + tarfile_path = mypath / tarfile_fakename + + ncfilename = slstrdir / 'L2P_GHRSST-SSTskin-202204131200.nc' + self.fake_dataset.to_netcdf(os.fspath(ncfilename)) + xmlfile_path = slstrdir / 'xfdumanifest.xml' + xmlfile_path.touch() + + with tarfile.open(name=tarfile_path, mode='w') as tar: + tar.add(os.fspath(ncfilename), arcname=Path(slstr_fakename) / ncfilename.name) + tar.add(os.fspath(xmlfile_path), arcname=Path(slstr_fakename) / xmlfile_path.name) + + return tarfile_path + + def test_instantiate_single_netcdf_file(self, tmp_path): + """Test initialization of file handlers - given a single netCDF file.""" + filename_info = {} + tmp_filepath = tmp_path / 'fake_dataset.nc' + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) + + def test_instantiate_tarfile(self, tmp_path): + """Test initialization of file handlers - given a tar file as in the case of the SAFE format.""" + filename_info = {} + tarfile_path = self._create_tarfile_with_testdata(tmp_path) + + GHRSSTL2FileHandler(os.fspath(tarfile_path), filename_info, None) + + def test_get_dataset(self, tmp_path): + """Test retrieval of datasets.""" + filename_info = {} + tmp_filepath = tmp_path / 'fake_dataset.nc' + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) + + test.get_dataset('longitude', {'standard_name': 'longitude'}) + test.get_dataset('latitude', {'standard_name': 'latitude'}) + test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) + + with pytest.raises(KeyError): + test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) + + def test_get_sensor(self, tmp_path): + """Test retrieval of the sensor name from the netCDF file.""" + dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z + 
filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', + 'satid': 'NOAA20_', 'valid_time': dt_valid} + + tmp_filepath = tmp_path / 'fake_dataset.nc' + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) + assert test.sensor == 'viirs' + + def test_get_start_and_end_times(self, tmp_path): + """Test retrieval of the start and end times from the netCDF file.""" + dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z + good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z + good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z + + filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_', + 'satid': 'NOAA20_', 'valid_time': dt_valid} + + tmp_filepath = tmp_path / 'fake_dataset.nc' + self.fake_dataset.to_netcdf(os.fspath(tmp_filepath)) + + test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None) + + assert test.start_time == good_start_time + assert test.end_time == good_stop_time diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py index 4f8ce50e3c..02b9632335 100644 --- a/satpy/tests/reader_tests/test_goes_imager_hrit.py +++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py @@ -22,10 +22,13 @@ from unittest import mock import numpy as np +from pyresample.utils import proj4_radius_parameters from xarray import DataArray from satpy.readers.goes_imager_hrit import ( ALTITUDE, + EQUATOR_RADIUS, + POLE_RADIUS, HRITGOESFileHandler, HRITGOESPrologueFileHandler, make_gvar_float, @@ -166,3 +169,31 @@ def test_get_dataset(self, base_get_dataset): {'projection_longitude': self.reader.mda['projection_parameters']['SSP_longitude'], 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE}) + + def test_get_area_def(self): + """Test getting the area definition.""" + self.reader.mda.update({ + 'cfac': 10216334, + 'lfac': 10216334, + 'coff': 1408.0, + 'loff': 944.0, + 'number_of_lines': 464, + 'number_of_columns': 2816 + }) + dsid = make_dataid(name="CH1", calibration='reflectance', + resolution=3000) + area = self.reader.get_area_def(dsid) + + a, b = proj4_radius_parameters(area.proj_dict) + assert a == EQUATOR_RADIUS + assert b == POLE_RADIUS + assert area.proj_dict['h'] == ALTITUDE + assert area.proj_dict['lon_0'] == 100.1640625 + assert area.proj_dict['proj'] == 'geos' + assert area.proj_dict['units'] == 'm' + assert area.width == 2816 + assert area.height == 464 + assert area.area_id == 'goes-15_goes_imager_fd_3km' + area_extent_exp = (-5639254.900260435, 1925159.4881528523, + 5643261.475678028, 3784210.48191544) + np.testing.assert_allclose(area.area_extent, area_extent_exp) diff --git a/satpy/tests/reader_tests/test_goes_imager_nc.py b/satpy/tests/reader_tests/test_goes_imager_nc.py index 14f7130eb3..be2332c77e 100644 --- a/satpy/tests/reader_tests/test_goes_imager_nc.py +++ b/satpy/tests/reader_tests/test_goes_imager_nc.py @@ -280,9 +280,6 @@ def test_get_dataset_counts(self): 'projection_latitude': 0.0, 'projection_altitude': ALTITUDE, 'yaw_flip': True}, - 'satellite_longitude': -75.0, - 'satellite_latitude': 0.0, - 'satellite_altitude': ALTITUDE, 'platform_name': 'GOES-15', 'sensor': 'goes_imager', 'sector': UNKNOWN_SECTOR, diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py index 0b5e2e2e85..06cac88372 100644 --- a/satpy/tests/reader_tests/test_hrit_base.py +++ b/satpy/tests/reader_tests/test_hrit_base.py @@ -20,11 +20,13 @@ import os
import unittest from datetime import datetime +from io import BytesIO from tempfile import NamedTemporaryFile, gettempdir from unittest import mock import numpy as np +from satpy.readers import FSFile from satpy.readers.hrit_base import HRITFileHandler, decompress, get_xritdecompress_cmd, get_xritdecompress_outfile @@ -79,6 +81,19 @@ def test_decompress(self, popen): self.assertEqual(res, os.path.join('.', 'bla.__')) +def new_get_hd(instance, hdr_info): + """Generate some metadata.""" + instance.mda = {'spectral_channel_id': 1} + instance.mda.setdefault('number_of_bits_per_pixel', 10) + + instance.mda['projection_parameters'] = {'a': 6378169.00, + 'b': 6356583.80, + 'h': 35785831.00, + 'SSP_longitude': 0.0} + instance.mda['orbital_parameters'] = {} + instance.mda['total_header_length'] = 12 + + class TestHRITFileHandler(unittest.TestCase): """Test the HRITFileHandler.""" @@ -89,8 +104,13 @@ def setUp(self, fromfile): fromfile.return_value = np.array([(1, 2)], dtype=[('total_header_length', int), ('hdr_id', int)]) - with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen: + with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \ + mock.patch('satpy.readers.utils.open', m, create=True) as utilopen, \ + mock.patch.object(HRITFileHandler, '_get_hd', new=new_get_hd): + newopen.return_value.__enter__.return_value.tell.return_value = 1 + FAKE_DATA_LARGE_ENOUGH_FOR_TESTING = bytes([0]*8192) + utilopen.return_value.__enter__.return_value.read.return_value = FAKE_DATA_LARGE_ENOUGH_FOR_TESTING self.reader = HRITFileHandler('filename', {'platform_shortname': 'MSG3', 'start_time': datetime(2016, 3, 3, 0, 0)}, @@ -150,11 +170,26 @@ def test_get_area_def(self): 30310525626438.438, 3720765401003.719)) @mock.patch('satpy.readers.hrit_base.np.memmap') - def test_read_band(self, memmap): - """Test reading a single band.""" + def test_read_band_filepath(self, memmap): + """Test reading a single band from a filepath.""" nbits = self.reader.mda['number_of_bits_per_pixel'] memmap.return_value = np.random.randint(0, 256, size=int((464 * 3712 * nbits) / 8), dtype=np.uint8) res = self.reader.read_band('VIS006', None) self.assertEqual(res.compute().shape, (464, 3712)) + + @mock.patch('satpy.readers.FSFile.open') + def test_read_band_FSFile(self, fsfile_open): + """Test reading a single band from a FSFile.""" + nbits = self.reader.mda['number_of_bits_per_pixel'] + self.reader.filename = FSFile(self.reader.filename) # convert str to FSFile + fsfile_open.return_value = BytesIO( + np.random.randint( + 0, 256, + size=int((464 * 3712 * nbits) / 8) + self.reader.mda['total_header_length'], + dtype=np.uint8 + ).tobytes() + ) + res = self.reader.read_band('VIS006', None) + self.assertEqual(res.compute().shape, (464, 3712)) diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/test_modis_l1b.py index 981fac39bb..245eb91395 100644 --- a/satpy/tests/reader_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/test_modis_l1b.py @@ -162,3 +162,27 @@ def test_load_vis(self, modis_l1b_nasa_mod021km_file): assert dataset.shape == _shape_for_resolution(1000) assert dataset.attrs['resolution'] == 1000 _check_shared_metadata(dataset) + + @pytest.mark.parametrize("mask_saturated", [False, True]) + def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file): + """Test loading the visible band with and without saturation masking.""" + scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file, + reader_kwargs={"mask_saturated": mask_saturated}) + dataset_name = '2'
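+ # band '2' is the 1 km visible band; the fake file's last pixels hold fill, saturation and 'can't aggregate' values +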
scene.load([dataset_name]) + dataset = scene[dataset_name] + assert dataset.shape == _shape_for_resolution(1000) + assert dataset.attrs['resolution'] == 1000 + _check_shared_metadata(dataset) + + # check saturation fill values + data = dataset.values + assert np.isnan(data[-1, -1]) # normal fill value + if mask_saturated: + assert np.isnan(data[-1, -2]) # saturation + assert np.isnan(data[-1, -3]) # can't aggregate + else: + # test data factor/offset are 1/0 + # albedos are converted to % + assert data[-1, -2] >= 32767 * 100.0 # saturation + assert data[-1, -3] >= 32767 * 100.0 # can't aggregate diff --git a/satpy/tests/reader_tests/test_msu_gsa_l1b.py b/satpy/tests/reader_tests/test_msu_gsa_l1b.py new file mode 100644 index 0000000000..bfb51d7873 --- /dev/null +++ b/satpy/tests/reader_tests/test_msu_gsa_l1b.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>. +"""Tests for the 'msu_gsa_l1b' reader.""" +import os +from unittest import mock + +import dask.array as da +import numpy as np +import pytest +import xarray as xr + +from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler +from satpy.tests.utils import make_dataid + +SOLCONST = '273.59' + + +class FakeHDF5FileHandler2(FakeHDF5FileHandler): + """Swap-in HDF5 File Handler.""" + + def _get_data(self, num_scans, num_cols): + data = { + 'Data/resolution_1km/Solar_Zenith_Angle': + xr.DataArray( + da.ones((num_scans*4, num_cols*4), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999. + }, + dims=('x', 'y')), + 'Geolocation/resolution_1km/Latitude': + xr.DataArray( + da.ones((num_scans*4, num_cols*4), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999. + }, + dims=('x', 'y')), + 'Geolocation/resolution_1km/Longitude': + xr.DataArray( + da.ones((num_scans*4, num_cols*4), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999. + }, + dims=('x', 'y')), + 'Data/resolution_1km/Radiance_01': + xr.DataArray( + da.ones((num_scans*4, num_cols*4), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999., 'F_solar_constant': SOLCONST + }, + dims=('x', 'y')), + 'Data/resolution_4km/Solar_Zenith_Angle': + xr.DataArray( + da.ones((num_scans, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999. + }, + dims=('x', 'y')), + 'Geolocation/resolution_4km/Latitude': + xr.DataArray( + da.ones((num_scans, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999. + }, + dims=('x', 'y')), + 'Geolocation/resolution_4km/Longitude': + xr.DataArray( + da.ones((num_scans, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ }, + dims=('x', 'y')), + 'Data/resolution_4km/Brightness_Temperature_09': + xr.DataArray( + da.ones((num_scans, num_cols), chunks=1024, + dtype=np.uint16), + attrs={ + 'scale': 0.01, 'offset': 0., 'fill_value': -999. + }, + dims=('x', 'y')), + } + return data + + def get_test_content(self, filename, filename_info, filetype_info): + """Mimic reader input file content.""" + num_scans = 20 + num_cols = 2048 + global_attrs = { + '/attr/timestamp_without_timezone': '2022-01-13T12:45:00', + '/attr/satellite_observation_point_height': '38500.0', + '/attr/satellite_observation_point_latitude': '71.25', + '/attr/satellite_observation_point_longitude': '21.44', + } + + data = self._get_data(num_scans, num_cols) + + test_content = {} + test_content.update(global_attrs) + test_content.update(data) + return test_content + + +class TestMSUGSABReader: + """Test MSU GS/A L1B Reader.""" + + yaml_file = "msu_gsa_l1b.yaml" + + def setup(self): + """Wrap HDF5 file handler with our own fake handler.""" + from satpy._config import config_search_paths + from satpy.readers import load_reader + from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler + self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) + # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library + self.p = mock.patch.object(MSUGSAFileHandler, '__bases__', (FakeHDF5FileHandler2,)) + self.fake_handler = self.p.start() + self.p.is_local = True + + filenames = ['ArcticaM1_202201131245.h5'] + self.reader = load_reader(self.reader_configs) + files = self.reader.select_files_from_pathnames(filenames) + self.reader.create_filehandlers(files) + + def teardown(self): + """Stop wrapping the HDF5 file handler.""" + self.p.stop() + + def test_irbt(self): + """Test retrieval in brightness temperature.""" + ds_ids = [make_dataid(name='C09', calibration='brightness_temperature')] + res = self.reader.load(ds_ids) + assert 'C09' in res + assert res['C09'].attrs['calibration'] == 'brightness_temperature' + assert res['C09'].attrs['platform_name'] == 'Arctica-M-N1' + assert res['C09'].attrs['sat_latitude'] == 71.25 + assert res['C09'].attrs['sat_longitude'] == 21.44 + assert res['C09'].attrs['sat_altitude'] == 38500. + assert res['C09'].attrs['resolution'] == 4000 + + def test_nocounts(self): + """Test we can't get IR or VIS data as counts.""" + ds_ids = [make_dataid(name='C01', calibration='counts')] + with pytest.raises(KeyError): + self.reader.load(ds_ids) + + ds_ids = [make_dataid(name='C09', calibration='counts')] + with pytest.raises(KeyError): + self.reader.load(ds_ids) + + def test_vis_cal(self): + """Test that we can retrieve VIS data as both radiance and reflectance.""" + ds_ids = [make_dataid(name='C01', calibration='radiance')] + res = self.reader.load(ds_ids) + rad = res['C01'].data + ds_ids = [make_dataid(name='C01', calibration='reflectance')] + res = self.reader.load(ds_ids) + refl = res['C01'].data + + # Check the RAD->REFL conversion + np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl) diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py index 0a3feea506..5e3053058e 100644 --- a/satpy/tests/reader_tests/test_nwcsaf_msg.py +++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py @@ -24,6 +24,8 @@ import h5py import numpy as np +from satpy.tests.reader_tests.utils import fill_h5 + CTYPE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8) CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. 
* 20).astype(np.uint8)
CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME
@@ -450,32 +452,19 @@ def setUp(self):
             "SAFNWC_MSG3_CTTH_201611090800_MSG-N_______.PLAX.CTTH.0.h5",
         )

-        def fill_h5(root, stuff):
-            for key, val in stuff.items():
-                if key in ["value", "attrs"]:
-                    continue
-                if "value" in val:
-                    root[key] = val["value"]
-                else:
-                    grp = root.create_group(key)
-                    fill_h5(grp, stuff[key])
-                if "attrs" in val:
-                    for attrs, val in val["attrs"].items():
-                        if isinstance(val, str) and val.startswith(
-                                "<HDF5 object reference>"
-                        ):
-                            root[key].attrs[attrs] = root[val[24:]].ref
-                        else:
-                            root[key].attrs[attrs] = val
+        def cut_h5_object_ref(root, attr):
+            if isinstance(attr, str) and attr.startswith("<HDF5 object reference>"):
+                return root[attr[24:]].ref
+            return attr

         h5f = h5py.File(self.filename_ct, mode="w")
-        fill_h5(h5f, fake_ct)
+        fill_h5(h5f, fake_ct, attr_processor=cut_h5_object_ref)
         for attr, val in fake_ct["attrs"].items():
             h5f.attrs[attr] = val
         h5f.close()

         h5f = h5py.File(self.filename_ctth, mode="w")
-        fill_h5(h5f, fake_ctth)
+        fill_h5(h5f, fake_ctth, attr_processor=cut_h5_object_ref)
         for attr, val in fake_ctth["attrs"].items():
             h5f.attrs[attr] = val
         h5f.close()
diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py
index 7d4ba57a70..baa1090cad 100644
--- a/satpy/tests/reader_tests/test_nwcsaf_nc.py
+++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py
@@ -1,5 +1,5 @@
 #!/usr/bin/env python
-# Copyright (c) 2018, 2020 Satpy developers
+# Copyright (c) 2018-2022 Satpy developers
 #
 # This file is part of satpy.
 #
@@ -19,6 +19,10 @@
 import unittest
 from unittest import mock

+import numpy as np
+import pytest
+import xarray as xr
+
 PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000',
            'gdal_xgeo_up_left': -5569500.0,
            'gdal_ygeo_up_left': 5437500.0,
@@ -35,55 +39,59 @@ class TestNcNWCSAF(unittest.TestCase):
     """Test the NcNWCSAF reader."""

     @mock.patch('satpy.readers.nwcsaf_nc.unzip_file')
-    @mock.patch('satpy.readers.nwcsaf_nc.xr')
-    def setUp(self, xr_, unzip):
+    @mock.patch('satpy.readers.nwcsaf_nc.xr.open_dataset')
+    def setUp(self, xr_open_dataset, unzip):
         """Set up the test case."""
         from satpy.readers.nwcsaf_nc import NcNWCSAF
-        xr_.return_value = mock.Mock(attrs={})
+        xr_open_dataset.return_value = xr.Dataset({"nx": xr.DataArray(), "ny": xr.DataArray()},
+                                                  attrs={"source": "bla",
+                                                         "satellite_identifier": "blu"})
+        self.fake_dataset = xr_open_dataset.return_value
         unzip.return_value = ''
-        self.scn = NcNWCSAF('filename', {}, {})
+        self.filehandler_class = NcNWCSAF
+        self.fh = self.filehandler_class('filename', {}, {})

     def test_sensor_name(self):
         """Test that the correct sensor name is being set."""
-        self.scn.set_platform_and_sensor(platform_name='Metop-B')
-        self.assertEqual(self.scn.sensor, set(['avhrr-3']))
-        self.assertEqual(self.scn.sensor_names, set(['avhrr-3']))
+        self.fh.set_platform_and_sensor(platform_name='Metop-B')
+        self.assertEqual(self.fh.sensor, set(['avhrr-3']))
+        self.assertEqual(self.fh.sensor_names, set(['avhrr-3']))

-        self.scn.set_platform_and_sensor(platform_name='NOAA-20')
-        self.assertEqual(self.scn.sensor, set(['viirs']))
-        self.assertEqual(self.scn.sensor_names, set(['viirs']))
+        self.fh.set_platform_and_sensor(platform_name='NOAA-20')
+        self.assertEqual(self.fh.sensor, set(['viirs']))
+        self.assertEqual(self.fh.sensor_names, set(['viirs']))

-        self.scn.set_platform_and_sensor(platform_name='Himawari-8')
-        self.assertEqual(self.scn.sensor, set(['ahi']))
-        self.assertEqual(self.scn.sensor_names,
set(['ahi'])) + self.fh.set_platform_and_sensor(platform_name='Himawari-8') + self.assertEqual(self.fh.sensor, set(['ahi'])) + self.assertEqual(self.fh.sensor_names, set(['ahi'])) - self.scn.set_platform_and_sensor(sat_id='GOES16') - self.assertEqual(self.scn.sensor, set(['abi'])) - self.assertEqual(self.scn.sensor_names, set(['abi'])) + self.fh.set_platform_and_sensor(sat_id='GOES16') + self.assertEqual(self.fh.sensor, set(['abi'])) + self.assertEqual(self.fh.sensor_names, set(['abi'])) - self.scn.set_platform_and_sensor(platform_name='GOES-17') - self.assertEqual(self.scn.sensor, set(['abi'])) - self.assertEqual(self.scn.sensor_names, set(['abi'])) + self.fh.set_platform_and_sensor(platform_name='GOES-17') + self.assertEqual(self.fh.sensor, set(['abi'])) + self.assertEqual(self.fh.sensor_names, set(['abi'])) - self.scn.set_platform_and_sensor(sat_id='MSG4') - self.assertEqual(self.scn.sensor, set(['seviri'])) + self.fh.set_platform_and_sensor(sat_id='MSG4') + self.assertEqual(self.fh.sensor, set(['seviri'])) - self.scn.set_platform_and_sensor(platform_name='Meteosat-11') - self.assertEqual(self.scn.sensor, set(['seviri'])) - self.assertEqual(self.scn.sensor_names, set(['seviri'])) + self.fh.set_platform_and_sensor(platform_name='Meteosat-11') + self.assertEqual(self.fh.sensor, set(['seviri'])) + self.assertEqual(self.fh.sensor_names, set(['seviri'])) def test_get_area_def(self): """Test that get_area_def() returns proper area.""" dsid = {'name': 'foo'} - self.scn.nc[dsid['name']].shape = (5, 10) + self.fh.nc[dsid['name']] = xr.DataArray(np.zeros((5, 10))) # a, b and h in kilometers - self.scn.nc.attrs = PROJ_KM - _check_area_def(self.scn.get_area_def(dsid)) + self.fh.nc.attrs = PROJ_KM + _check_area_def(self.fh.get_area_def(dsid)) # a, b and h in meters - self.scn.nc.attrs = PROJ - _check_area_def(self.scn.get_area_def(dsid)) + self.fh.nc.attrs = PROJ + _check_area_def(self.fh.get_area_def(dsid)) def test_scale_dataset_attr_removal(self): """Test the scaling of the dataset and removal of obsolete attributes.""" @@ -93,7 +101,7 @@ def test_scale_dataset_attr_removal(self): attrs = {'scale_factor': np.array(10), 'add_offset': np.array(20)} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = self.scn.scale_dataset('dummy', var, 'dummy') + var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [30, 40, 50]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) @@ -106,7 +114,7 @@ def test_scale_dataset_floating(self): 'add_offset': np.array(2.5), '_FillValue': 1} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = self.scn.scale_dataset('dummy', var, 'dummy') + var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, 7]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) @@ -115,7 +123,7 @@ def test_scale_dataset_floating(self): 'add_offset': np.array(2.5), 'valid_min': 1.1} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = self.scn.scale_dataset('dummy', var, 'dummy') + var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [np.nan, 5.5, 7]) self.assertNotIn('scale_factor', var.attrs) self.assertNotIn('add_offset', var.attrs) @@ -124,7 +132,7 @@ def test_scale_dataset_floating(self): 'add_offset': np.array(2.5), 'valid_max': 2.1} var = xr.DataArray([1, 2, 3], attrs=attrs) - var = self.scn.scale_dataset('dummy', var, 'dummy') + var = self.fh.scale_dataset(var, 'dummy') np.testing.assert_allclose(var, [4, 5.5, np.nan]) self.assertNotIn('scale_factor', 
var.attrs)
         self.assertNotIn('add_offset', var.attrs)
@@ -133,7 +141,7 @@ def test_scale_dataset_floating(self):
                  'add_offset': np.array(2.5),
                  'valid_range': (1.1, 2.1)}
         var = xr.DataArray([1, 2, 3], attrs=attrs)
-        var = self.scn.scale_dataset('dummy', var, 'dummy')
+        var = self.fh.scale_dataset(var, 'dummy')
         np.testing.assert_allclose(var, [np.nan, 5.5, np.nan])
         self.assertNotIn('scale_factor', var.attrs)
         self.assertNotIn('add_offset', var.attrs)
@@ -143,13 +151,170 @@ def test_scale_dataset_floating(self):
                  'add_offset': np.array(-2000.),
                  'valid_range': (0., 27000.)}
         var = xr.DataArray([1, 2, 3], attrs=attrs)
-        var = self.scn.scale_dataset('dummy', var, 'dummy')
+        var = self.fh.scale_dataset(var, 'dummy')
         np.testing.assert_allclose(var, [-1999., -1998., -1997.])
         self.assertNotIn('scale_factor', var.attrs)
         self.assertNotIn('add_offset', var.attrs)
         self.assertEqual(var.attrs['valid_range'][0], -2000.)
         self.assertEqual(var.attrs['valid_range'][1], 25000.)

+    def test_get_dataset_scales_and_offsets(self):
+        """Test that get_dataset() returns scaled and offset data."""
+        dsid = {'name': 'cpp_cot'}
+        scale = 4
+        offset = 8
+        the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+                                                          "add_offset": np.array(offset, dtype=float)})
+        self.fh.nc[dsid['name']] = the_array
+
+        info = dict(name="cpp_cot",
+                    file_type="nc_nwcsaf_cpp")
+
+        res = self.fh.get_dataset(dsid, info)
+        np.testing.assert_allclose(res, the_array * scale + offset)
+
+    def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self):
+        """Test that get_dataset() returns scaled palette_meanings using another dataset as the scaling source."""
+        dsid = {'name': 'cpp_cot'}
+        scale = 4
+        offset = 8
+        array = xr.DataArray(np.ones((5, 3)), attrs={"palette_meanings": "1 2 3 4",
+                                                     "fill_value_color": (0, 0, 0)})
+        self.fh.nc[dsid['name']] = array
+
+        so_array = xr.DataArray(np.ones((10, 10)),
+                                attrs={"scale_factor": np.array(scale, dtype=float),
+                                       "add_offset": np.array(offset, dtype=float)},
+                                dims=["lines", "colors"])
+
+        info = dict(name="cpp_cot",
+                    file_type="nc_nwcsaf_cpp",
+                    scale_offset_dataset="scaleoffset")
+        self.fh.nc["scaleoffset"] = so_array
+
+        res = self.fh.get_dataset(dsid, info)
+        np.testing.assert_allclose(res.attrs["palette_meanings"], np.arange(5) * scale + offset)
+
+    def test_get_dataset_raises_when_dataset_missing(self):
+        """Test that get_dataset() raises an error when the requested dataset is missing."""
+        dsid = {'name': 'cpp_cot'}
+        info = dict(name="cpp_cot",
+                    file_type="nc_nwcsaf_cpp")
+        with pytest.raises(KeyError):
+            self.fh.get_dataset(dsid, info)
+
+    def test_get_dataset_uses_file_key_if_present(self):
+        """Test that get_dataset() uses a file_key if present."""
+        dsid_cpp = {'name': 'cpp_cot'}
+        dsid_cmic = {'name': 'cmic_cot'}
+        scale = 4
+        offset = 8
+        the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+                                                          "add_offset": np.array(offset, dtype=float)})
+        file_key = "cmic_cot"
+        self.fh.nc[file_key] = the_array
+
+        info_cpp = dict(name="cpp_cot",
+                        file_key=file_key,
+                        file_type="nc_nwcsaf_cpp")
+
+        res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp)
+
+        info_cmic = dict(name="cmic_cot",
+                         file_type="nc_nwcsaf_cpp")
+
+        res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic)
+        np.testing.assert_allclose(res_cpp, res_cmic)
+
+    def test_get_dataset_can_handle_file_key_list(self):
+        """Test that get_dataset() can handle a list of file_keys."""
+        dsid_cpp = {'name': 'cpp_reff'}
+        dsid_cmic = {'name': 'cmic_cre'}
+        scale = 4
+        offset = 8
+        data_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+                                                           "add_offset": np.array(offset, dtype=float)})
+
+        self.fh.nc["cpp_reff"] = data_array
+        self.fh.nc["cmic_cre"] = data_array
+        self.fh.file_key_prefix = 'cpp_'
+
+        info_cpp = dict(name="cmic_reff",
+                        file_key=['reff', 'cre'],
+                        file_type="nc_nwcsaf_cpp")
+
+        res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp)
+
+        info_cmic = dict(name="cmic_reff",
+                         file_key=['reff', 'cre'],
+                         file_type="nc_nwcsaf_cpp")
+
+        res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic)
+        np.testing.assert_allclose(res_cpp, res_cmic)
+
+
+class TestNcNWCSAFFileKeyPrefix(unittest.TestCase):
+    """Test the NcNWCSAF reader when using a file key prefix."""
+
+    @mock.patch('satpy.readers.nwcsaf_nc.unzip_file')
+    @mock.patch('satpy.readers.nwcsaf_nc.xr.open_dataset')
+    def setUp(self, xr_open_dataset, unzip):
+        """Set up the test case."""
+        from satpy.readers.nwcsaf_nc import NcNWCSAF
+        xr_open_dataset.return_value = xr.Dataset({"nx": xr.DataArray(), "ny": xr.DataArray()},
+                                                  attrs={"source": "bla",
+                                                         "satellite_identifier": "blu"})
+        self.fake_dataset = xr_open_dataset.return_value
+        unzip.return_value = ''
+        self.filehandler_class = NcNWCSAF
+        self.file_key_prefix = "cmic_"
+        self.fh = self.filehandler_class('filename', {}, {"file_key_prefix": self.file_key_prefix})
+
+    def test_get_dataset_uses_file_key_prefix(self):
+        """Test that get_dataset() uses a file_key_prefix."""
+        dsid_cpp = {'name': 'cpp_cot'}
+        dsid_cmic = {'name': 'cmic_cot'}
+        scale = 4
+        offset = 8
+        the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+                                                          "add_offset": np.array(offset, dtype=float)})
+        file_key = "cot"
+        self.fh.nc[self.file_key_prefix + file_key] = the_array
+
+        info_cpp = dict(name="cpp_cot",
+                        file_key=file_key,
+                        file_type="nc_nwcsaf_cpp")
+
+        res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp)
+
+        info_cmic = dict(name="cmic_cot",
+                         file_type="nc_nwcsaf_cpp")
+
+        res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic)
+        np.testing.assert_allclose(res_cpp, res_cmic)
+
+    def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self):
+        """Test that get_dataset() returns scaled palette_meanings using another dataset as the scaling source."""
+        dsid = {'name': 'cpp_cot_pal'}
+        scale = 4
+        offset = 8
+        array = xr.DataArray(np.ones((5, 3)), attrs={"palette_meanings": "1 2 3 4",
+                                                     "fill_value_color": (0, 0, 0)})
+        self.fh.nc[dsid['name']] = array
+
+        so_array = xr.DataArray(np.ones((10, 10)),
+                                attrs={"scale_factor": np.array(scale, dtype=float),
+                                       "add_offset": np.array(offset, dtype=float)},
+                                dims=["lines", "colors"])
+
+        info = dict(name="cpp_cot_pal",
+                    file_type="nc_nwcsaf_cpp",
+                    scale_offset_dataset="scaleoffset")
+        self.fh.nc[self.file_key_prefix + "scaleoffset"] = so_array
+
+        res = self.fh.get_dataset(dsid, info)
+        np.testing.assert_allclose(res.attrs["palette_meanings"], np.arange(5) * scale + offset)
+

 def _check_area_def(area_definition):
     correct_h = float(PROJ['gdal_projection'].split('+h=')[-1])
diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py
index 13d39b7758..c7fadc4341 100644
--- a/satpy/tests/reader_tests/test_sar_c_safe.py
+++ b/satpy/tests/reader_tests/test_sar_c_safe.py
@@ -423,6 +423,142 @@ def __init__(self, *args):
 """

+
+noise_xml_with_holes = b"""<?xml version="1.0" encoding="UTF-8"?>
+<noise>
+  <noiseRangeVectorList count="3">
+    <noiseRangeVector>
+      <azimuthTime>2020-03-15T05:04:28.137817</azimuthTime>
+      <line>0</line>
+      <pixel count="6">0 2 4 6 8 9</pixel>
+      <noiseRangeLut count="6">0.00000e+00 2.00000e+00 4.00000e+00 6.00000e+00 8.00000e+00 9.00000e+00</noiseRangeLut>
+    </noiseRangeVector>
+    <noiseRangeVector>
+      <azimuthTime>2020-03-15T05:04:28.137817</azimuthTime>
+      <line>5</line>
+      <pixel count="6">0 2 4 7 8 9</pixel>
+      <noiseRangeLut count="6">0.00000e+00 2.00000e+00 4.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00</noiseRangeLut>
+    </noiseRangeVector>
+    <noiseRangeVector>
+      <azimuthTime>2020-03-15T05:04:28.137817</azimuthTime>
+      <line>9</line>
+      <pixel count="6">0 2 5 7 8 9</pixel>
+      <noiseRangeLut count="6">0.00000e+00 2.00000e+00 5.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00</noiseRangeLut>
+    </noiseRangeVector>
+  </noiseRangeVectorList>
+  <noiseAzimuthVectorList count="12">
+    <noiseAzimuthVector>
+      <swath>IW1</swath>
+      <firstAzimuthLine>0</firstAzimuthLine>
+      <firstRangeSample>3</firstRangeSample>
+      <lastAzimuthLine>2</lastAzimuthLine>
+      <lastRangeSample>5</lastRangeSample>
+      <line count="1">0</line>
+      <noiseAzimuthLut count="1">1.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW1</swath>
+      <firstAzimuthLine>1</firstAzimuthLine>
+      <firstRangeSample>0</firstRangeSample>
+      <lastAzimuthLine>5</lastAzimuthLine>
+      <lastRangeSample>1</lastRangeSample>
+      <line count="3">2 4 5</line>
+      <noiseAzimuthLut count="3">2.000000e+00 2.000000e+00 2.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>2</firstAzimuthLine>
+      <firstRangeSample>8</firstRangeSample>
+      <lastAzimuthLine>4</lastAzimuthLine>
+      <lastRangeSample>9</lastRangeSample>
+      <line count="2">2 4</line>
+      <noiseAzimuthLut count="2">3.000000e+00 3.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>3</firstAzimuthLine>
+      <firstRangeSample>2</firstRangeSample>
+      <lastAzimuthLine>5</lastAzimuthLine>
+      <lastRangeSample>3</lastRangeSample>
+      <line count="2">3 5</line>
+      <noiseAzimuthLut count="2">4.000000e+00 4.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>3</firstAzimuthLine>
+      <firstRangeSample>4</firstRangeSample>
+      <lastAzimuthLine>4</lastAzimuthLine>
+      <lastRangeSample>5</lastRangeSample>
+      <line count="2">3 4</line>
+      <noiseAzimuthLut count="2">5.000000e+00 5.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>4</firstAzimuthLine>
+      <firstRangeSample>6</firstRangeSample>
+      <lastAzimuthLine>4</lastAzimuthLine>
+      <lastRangeSample>7</lastRangeSample>
+      <line count="1">4</line>
+      <noiseAzimuthLut count="1">6.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>5</firstAzimuthLine>
+      <firstRangeSample>4</firstRangeSample>
+      <lastAzimuthLine>7</lastAzimuthLine>
+      <lastRangeSample>6</lastRangeSample>
+      <line count="2">5 7</line>
+      <noiseAzimuthLut count="2">7.000000e+00 7.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>5</firstAzimuthLine>
+      <firstRangeSample>7</firstRangeSample>
+      <lastAzimuthLine>7</lastAzimuthLine>
+      <lastRangeSample>9</lastRangeSample>
+      <line count="1">6</line>
+      <noiseAzimuthLut count="1">8.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>6</firstAzimuthLine>
+      <firstRangeSample>0</firstRangeSample>
+      <lastAzimuthLine>7</lastAzimuthLine>
+      <lastRangeSample>3</lastRangeSample>
+      <line count="2">6 7</line>
+      <noiseAzimuthLut count="2">9.000000e+00 9.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>8</firstAzimuthLine>
+      <firstRangeSample>0</firstRangeSample>
+      <lastAzimuthLine>9</lastAzimuthLine>
+      <lastRangeSample>0</lastRangeSample>
+      <line count="1">8</line>
+      <noiseAzimuthLut count="1">10.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>8</firstAzimuthLine>
+      <firstRangeSample>2</firstRangeSample>
+      <lastAzimuthLine>9</lastAzimuthLine>
+      <lastRangeSample>3</lastRangeSample>
+      <line count="2">8 9</line>
+      <noiseAzimuthLut count="2">11.000000e+00 11.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>8</firstAzimuthLine>
+      <firstRangeSample>4</firstRangeSample>
+      <lastAzimuthLine>8</lastAzimuthLine>
+      <lastRangeSample>5</lastRangeSample>
+      <line count="1">8</line>
+      <noiseAzimuthLut count="1">12.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+  </noiseAzimuthVectorList>
+</noise>
+"""
+
+
 calibration_xml = b"""
@@ -515,11 +651,31 @@ def setUp(self):
             [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
         ])

+        self.noise_fh_with_holes = SAFEXMLNoise(BytesIO(noise_xml_with_holes), filename_info, mock.MagicMock(),
+                                                self.annotation_fh)
+        self.expected_azimuth_noise_with_holes = np.array(
+            [[np.nan, np.nan, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan],
+             [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan],
+             [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, 3, 3],
+             [2, 2, 4, 4, 5, 5, np.nan, np.nan, 3, 3],
+             [2, 2, 4, 4, 5, 5, 6, 6, 3, 3],
+             [2, 2, 4, 4, 7, 7, 7, 8, 8, 8],
+             [9, 9, 9, 9, 7, 7, 7, 8, 8, 8],
+             [9, 9, 9, 9, 7, 7, 7, 8, 8, 8],
+             [10, np.nan, 11, 11, 12, 12, np.nan, np.nan, np.nan, np.nan],
+             [10, np.nan, 11, 11, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]
+             ])
+
     def test_azimuth_noise_array(self):
         """Test reading the azimuth-noise array."""
         res = self.noise_fh.azimuth_noise_reader.read_azimuth_noise_array()
         np.testing.assert_array_equal(res, self.expected_azimuth_noise)

+    def test_azimuth_noise_array_with_holes(self):
+        """Test reading the azimuth-noise array when it contains holes."""
+        res = self.noise_fh_with_holes.azimuth_noise_reader.read_azimuth_noise_array()
+        np.testing.assert_array_equal(res, self.expected_azimuth_noise_with_holes)
+
     def test_range_noise_array(self):
         """Test reading the range-noise array."""
         res = self.noise_fh.read_range_noise_array(chunks=5)
diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py
index 05ba43c561..6d42720c8d 100644
--- a/satpy/tests/reader_tests/test_scmi.py
+++ b/satpy/tests/reader_tests/test_scmi.py
@@ -123,6 +123,11 @@ def test_data_load(self):
             self.assertNotIn('_FillValue', res.attrs)
             self.assertEqual(res.attrs['standard_name'],
                              'toa_bidirectional_reflectance')
+            assert 'orbital_parameters' in res.attrs
+            orb_params = res.attrs['orbital_parameters']
+            assert orb_params['projection_longitude'] == -90.0
+            assert orb_params['projection_latitude'] == 0.0
+            assert orb_params['projection_altitude'] == 35785831.0

 class TestSCMIFileHandlerArea(unittest.TestCase):
diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py
new file mode 100644
index 0000000000..dfa55a557d
--- /dev/null
+++ b/satpy/tests/reader_tests/test_seadas_l2.py
@@ -0,0 +1,167 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for the 'seadas_l2' reader."""
+
+import numpy as np
+import pytest
+from pyhdf.SD import SD, SDC
+from pyresample.geometry import SwathDefinition
+from pytest_lazyfixture import lazy_fixture
+
+from satpy import Scene, available_readers
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_modis_chlor_a(tmp_path_factory):
+    """Create MODIS SEADAS file."""
+    filename = "a1.21322.1758.seadas.hdf"
+    full_path = str(tmp_path_factory.mktemp("seadas_l2") / filename)
+    return _create_seadas_chlor_a_file(full_path, "Aqua", "MODISA")
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_viirs_npp_chlor_a(tmp_path_factory):
+    """Create VIIRS NPP SEADAS file."""
+    filename = "SEADAS_npp_d20211118_t1728125_e1739327.hdf"
+    full_path = str(tmp_path_factory.mktemp("seadas") / filename)
+    return _create_seadas_chlor_a_file(full_path, "NPP", "VIIRSN")
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_viirs_j01_chlor_a(tmp_path_factory):
+    """Create VIIRS JPSS-01 SEADAS file."""
+    filename = "SEADAS_j01_d20211118_t1728125_e1739327.hdf"
+    full_path = str(tmp_path_factory.mktemp("seadas") / filename)
+    return _create_seadas_chlor_a_file(full_path, "JPSS-1", "VIIRSJ1")
+
+
+def _create_seadas_chlor_a_file(full_path, mission, sensor):
+    h = SD(full_path, SDC.WRITE | SDC.CREATE)
+    setattr(h, "Sensor Name", sensor)
+    h.Mission = mission
+    setattr(h, "Start Time", "2021322175853191")
+    setattr(h, "End Time", "2021322180551214")
+
+    lon_info = {
+        "type": SDC.FLOAT32,
+        "data": np.zeros((5, 5), dtype=np.float32),
+        "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+        "attrs": {
+            "long_name": "Longitude\x00",
+            "standard_name": "longitude\x00",
+            "units": "degrees_east\x00",
+            "valid_range": (-180.0, 180.0),
+        }
+    }
+    lat_info = {
+        "type": SDC.FLOAT32,
+        "data": np.zeros((5, 5), np.float32),
+        "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+        "attrs": {
+            "long_name": "Latitude\x00",
+            "standard_name": "latitude\x00",
+            "units": "degrees_north\x00",
+            "valid_range": (-90.0, 90.0),
+        }
+    }
+    _add_variable_to_file(h, "longitude", lon_info)
+    _add_variable_to_file(h, "latitude", lat_info)
+
+    chlor_a_info = {
+        "type": SDC.FLOAT32,
+        "data": np.ones((5, 5), np.float32),
+        "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+        "attrs": {
+            "long_name": "Chlorophyll Concentration, OCI Algorithm\x00",
+            "units": "mg m^-3\x00",
+            "standard_name": "mass_concentration_of_chlorophyll_in_sea_water\x00",
+            "valid_range": (0.001, 100.0),
+        }
+    }
+    _add_variable_to_file(h, "chlor_a", chlor_a_info)
+
+    l2_flags = np.zeros((5, 5), dtype=np.int32)
+    l2_flags[2, 2] = -1
+    l2_flags_info = {
+        "type": SDC.INT32,
+        "data": l2_flags,
+        "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+        "attrs": {},
+    }
+    _add_variable_to_file(h, "l2_flags", l2_flags_info)
+    return [full_path]
+
+
+def _add_variable_to_file(h, var_name, var_info):
+    v =
h.create(var_name, var_info['type'], var_info['data'].shape) + v[:] = var_info['data'] + for dim_count, dimension_name in enumerate(var_info['dim_labels']): + v.dim(dim_count).setname(dimension_name) + if var_info.get('fill_value'): + v.setfillvalue(var_info['fill_value']) + for attr_key, attr_val in var_info['attrs'].items(): + setattr(v, attr_key, attr_val) + + +class TestSEADAS: + """Test the SEADAS L2 file reader.""" + + def test_available_reader(self): + """Test that SEADAS L2 reader is available.""" + assert 'seadas_l2' in available_readers() + + @pytest.mark.parametrize( + "input_files", + [ + lazy_fixture("seadas_l2_modis_chlor_a"), + lazy_fixture("seadas_l2_viirs_npp_chlor_a"), + lazy_fixture("seadas_l2_viirs_j01_chlor_a"), + ]) + def test_scene_available_datasets(self, input_files): + """Test that datasets are available.""" + scene = Scene(reader='seadas_l2', filenames=input_files) + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert 'chlor_a' in available_datasets + + @pytest.mark.parametrize( + ("input_files", "exp_plat", "exp_sensor", "exp_rps"), + [ + (lazy_fixture("seadas_l2_modis_chlor_a"), "Aqua", {"modis"}, 10), + (lazy_fixture("seadas_l2_viirs_npp_chlor_a"), "Suomi-NPP", {"viirs"}, 16), + (lazy_fixture("seadas_l2_viirs_j01_chlor_a"), "NOAA-20", {"viirs"}, 16), + ]) + @pytest.mark.parametrize("apply_quality_flags", [False, True]) + def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags): + """Test that we can load 'chlor_a'.""" + reader_kwargs = {"apply_quality_flags": apply_quality_flags} + scene = Scene(reader='seadas_l2', filenames=input_files, reader_kwargs=reader_kwargs) + scene.load(['chlor_a']) + data_arr = scene['chlor_a'] + assert data_arr.attrs['platform_name'] == exp_plat + assert data_arr.attrs['sensor'] == exp_sensor + assert data_arr.attrs['units'] == 'mg m^-3' + assert data_arr.dtype.type == np.float32 + assert isinstance(data_arr.attrs["area"], SwathDefinition) + assert data_arr.attrs["rows_per_scan"] == exp_rps + data = data_arr.data.compute() + if apply_quality_flags: + assert np.isnan(data[2, 2]) + assert np.count_nonzero(np.isnan(data)) == 1 + else: + assert np.count_nonzero(np.isnan(data)) == 0 diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py index 5475098863..fa344b7396 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py @@ -57,6 +57,7 @@ def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0, m = mock.mock_open() with mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') as fromfile, \ mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \ + mock.patch('satpy.readers.utils.open', m, create=True) as utilopen, \ mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'), \ mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd), \ mock.patch.object(HRITMSGPrologueFileHandler, 'read_prologue', @@ -68,6 +69,10 @@ def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0, ('hdr_id', int)] ) newopen.return_value.__enter__.return_value.tell.return_value = 1 + # The size of the return value hereafter was chosen arbitrarily with the expectation + # that it would return sufficiently many bytes for testing the fake-opening of HRIT + # files. 
+ utilopen.return_value.__enter__.return_value.read.return_value = bytes([0]*8192) prologue = HRITMSGPrologueFileHandler( filename='dummy_prologue_filename', filename_info=filename_info, @@ -207,9 +212,6 @@ def get_attrs_exp(projection_longitude=0.0): 'standard_name': 'standard_name', 'platform_name': 'Meteosat-11', 'sensor': 'seviri', - 'satellite_longitude': projection_longitude, - 'satellite_latitude': 0.0, - 'satellite_altitude': 35785831.0, 'orbital_parameters': {'projection_longitude': projection_longitude, 'projection_latitude': 0., 'projection_altitude': 35785831.0, diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py index 103fa5788f..5ca8ac1a2e 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py @@ -20,6 +20,7 @@ import unittest from unittest import mock +import dask.array as da import numpy as np from satpy.readers import load_reader @@ -64,7 +65,7 @@ def get_test_content(self, filename, filename_info, filename_type): file_content['Brightness_Temperature/attr/add_offset'] = 0. file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE - # convert tp xarrays + # convert to xarrays from xarray import DataArray for key, val in file_content.items(): if isinstance(val, np.ndarray): @@ -72,10 +73,7 @@ def get_test_content(self, filename, filename_info, filename_type): for a in ['_FillValue', 'scale_factor', 'add_offset']: if key + '/attr/' + a in file_content: attrs[a] = file_content[key + '/attr/' + a] - file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs) - if 'y' not in file_content['Normalized_Radiance'].dims: - file_content['Normalized_Radiance'] = file_content['Normalized_Radiance'].rename({'fakeDim0': 'x', - 'fakeDim1': 'y'}) + file_content[key] = DataArray(da.from_array(val), dims=('x', 'y'), attrs=attrs) return file_content @@ -184,7 +182,6 @@ def test_sensor_names(self): 'Meteosat-11': 'MSG4/SEVIRI'} with mock.patch('satpy.tests.reader_tests.test_seviri_l1b_icare.' 
'FakeHDF4FileHandler2.get_test_content') as patched_func: - def _run_target(): patched_func.return_value = file_data return self.p.target(mock.MagicMock(), @@ -206,3 +203,16 @@ def test_bad_bandname(self): self.p.target(mock.MagicMock(), mock.MagicMock(), mock.MagicMock())._get_dsname({'name': 'badband'}) + + def test_nocompute(self): + """Test that dask does not compute anything in the reader itself.""" + import dask + + from satpy.tests.utils import CustomScheduler + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ + 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf' + ]) + r.create_filehandlers(loadables) + r.load(['VIS008']) diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py index 080066957a..20fab5fa6b 100644 --- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py +++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py @@ -22,14 +22,19 @@ from datetime import datetime from unittest import mock +import dask.array as da import numpy as np +import pytest +from pyresample import geometry -FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_csr'} +from satpy.tests.utils import make_dataid + +FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_asr'} FILENAME_INFO = {'start_time': '20191112000000', - 'spacecraft': 'MSG4'} + 'spacecraft': 'MSG1'} FILENAME_INFO2 = {'start_time': '20191112000000', - 'spacecraft': 'MSG4', + 'spacecraft': 'MSG1', 'server': 'TESTSERVER'} MPEF_PRODUCT_HEADER = { 'NominalTime': datetime(2019, 11, 6, 18, 0), @@ -38,79 +43,215 @@ } DATASET_INFO = { + 'name': 'testdata', 'key': '#1#brightnessTemperature', + 'coordinates': ('longitude', 'latitude'), 'fill_value': 0 } +DATASET_INFO_LAT = { + 'name': 'latitude', + 'key': 'latitude', + 'fill_value': -1.e+100 +} + +DATASET_INFO_LON = { + 'name': 'longitude', + 'key': 'longitude', + 'fill_value': -1.e+100 +} + + DATASET_ATTRS = { 'platform_name': 'MET08', 'ssp_lon': 41.5, 'seg_size': 16 } +AREA_DEF = geometry.AreaDefinition( + 'msg_seviri_iodc_48km', + 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution', + "", + {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], + 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + 232, + 232, + (-5570248.6866, -5567248.2834, 5567248.2834, 5570248.6866) +) + +AREA_DEF_FES = geometry.AreaDefinition( + 'msg_seviri_res_48km', + 'MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution', + "", + {'a': 6378169., 'b': 6356583.8, 'lon_0': 0.0, + 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + 232, + 232, + (-5570248.6866, -5567248.2834, 5567248.2834, 5570248.6866) +) -class TestSeviriL2Bufr(unittest.TestCase): - """Test NativeMSGBufrHandler.""" +AREA_DEF_EXT = geometry.AreaDefinition( + 'msg_seviri_iodc_9km_ext', + 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution ' + '(extended outside original 3km grid)', + "", + {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'], + 'h': 35785831., 'proj': 'geos', 'units': 'm'}, + 1238, + 1238, + (-5571748.888268564, -5571748.888155806, 5571748.888155806, 5571748.888268564) +) + +TEST_FILES = [ + 'ASRBUFRProd_20191106130000Z_00_OMPEFS01_MET08_FES_E0000', + 'MSG1-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr', + 'MSG1-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148' +] + +# Test data +DATA = np.random.uniform(low=250, high=350, size=(128,)) +LAT = 
np.random.uniform(low=-80, high=80, size=(128,))
+LON = np.random.uniform(low=-38.5, high=121.5, size=(128,))
+
+
+class SeviriL2BufrData:
+    """Mock SEVIRI L2 BUFR data."""

     @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows")
-    def seviri_l2_bufr_test(self, filename):
-        """Test the SEVIRI BUFR handler."""
+    def __init__(self, filename, with_adef=False, rect_lon='default'):
+        """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader."""
         import eccodes as ec
         from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler

-        buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
-        ec.codes_set(buf1, 'unpack', 1)
-        samp1 = np.random.uniform(low=250, high=350, size=(128,))
+        self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
+        ec.codes_set(self.buf1, 'unpack', 1)
         # write the bufr test data twice as we want to read in and then concatenate the data in the reader
-        # 55 id corresponds to METEOSAT 8
-        ec.codes_set(buf1, 'satelliteIdentifier', 55)
-        ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1)
-        ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1)
+        # 55 id corresponds to METEOSAT 8
+        ec.codes_set(self.buf1, 'satelliteIdentifier', 55)
+        ec.codes_set_array(self.buf1, 'latitude', LAT)
+        ec.codes_set_array(self.buf1, 'latitude', LAT)
+        ec.codes_set_array(self.buf1, 'longitude', LON)
+        ec.codes_set_array(self.buf1, 'longitude', LON)
+        ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)
+        ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)

-        m = mock.mock_open()
+        self.m = mock.mock_open()
         # only our offline products contain MPEF product headers, so we get the metadata from there
         if ('BUFRProd' in filename):
             with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile:
                 fromfile.return_value = MPEF_PRODUCT_HEADER
                 with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict:
                     recarray2dict.side_effect = (lambda x: x)
-                    fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO)
-                    fh.mpef_header = MPEF_PRODUCT_HEADER
+                    self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO,
+                                                      with_area_definition=with_adef, rectification_longitude=rect_lon)
+                    self.fh.mpef_header = MPEF_PRODUCT_HEADER

         else:
             # No Mpef Header so we get the metadata from the BUFR messages
-            with mock.patch('satpy.readers.seviri_l2_bufr.open', m, create=True):
+            with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True):
                 with mock.patch('eccodes.codes_bufr_new_from_file',
-                                side_effect=[buf1, None, buf1, None, buf1, None]) as ec1:
+                                side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1:
                     ec1.return_value = ec1.side_effect
                     with mock.patch('eccodes.codes_set') as ec2:
                         ec2.return_value = 1
                         with mock.patch('eccodes.codes_release') as ec5:
                             ec5.return_value = 1
-                            fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO)
+                            self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO,
+                                                              with_area_definition=with_adef,
+                                                              rectification_longitude=rect_lon)

-        with mock.patch('satpy.readers.seviri_l2_bufr.open', m, create=True):
+    def get_data(self, dataset_info):
+        """Read data from mock file."""
+        with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True):
             with mock.patch('eccodes.codes_bufr_new_from_file',
-                            side_effect=[buf1, buf1, None]) as ec1:
+                            side_effect=[self.buf1, self.buf1, None]) as ec1:
                 ec1.return_value = ec1.side_effect
                 with mock.patch('eccodes.codes_set') as ec2:
                     ec2.return_value = 1
                    with mock.patch('eccodes.codes_release') as ec5:
                        ec5.return_value = 1
-                        z = fh.get_dataset(None, DATASET_INFO)
-                        # concatenate the original test arrays as
-                        # get dataset will have read and concatented the data
-                        x1 = np.concatenate((samp1, samp1), axis=0)
-                        np.testing.assert_array_equal(z.values, x1)
-                        self.assertEqual(z.attrs['platform_name'],
-                                         DATASET_ATTRS['platform_name'])
-                        self.assertEqual(z.attrs['ssp_lon'],
-                                         DATASET_ATTRS['ssp_lon'])
-                        self.assertEqual(z.attrs['seg_size'],
-                                         DATASET_ATTRS['seg_size'])
-
-    def test_seviri_l2_bufr(self):
-        """Call the test function."""
-        self.seviri_l2_bufr_test('GIIBUFRProduct_20191106130000Z_00_OMPEFS04_MET11_FES_E0000')
-        self.seviri_l2_bufr_test('MSG4-SEVI-MSGGIIN-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr')
-        self.seviri_l2_bufr_test('MSG4-SEVI-MSGGIIN-0101-0101-20191106101500.000000000Z-20191106103218-1362148')
+                        z = self.fh.get_dataset(make_dataid(name=dataset_info['name'], resolution=48000), dataset_info)
+
+        return z
+
+
+@pytest.mark.parametrize("input_file", TEST_FILES)
+class TestSeviriL2BufrReader:
+    """Test SEVIRI L2 BUFR Reader."""
+
+    @staticmethod
+    def test_lonslats(input_file):
+        """Test reading of longitude and latitude data with SEVIRI L2 BUFR reader."""
+        bufr_obj = SeviriL2BufrData(input_file)
+        zlat = bufr_obj.get_data(DATASET_INFO_LAT)
+        zlon = bufr_obj.get_data(DATASET_INFO_LON)
+        np.testing.assert_array_equal(zlat.values, np.concatenate((LAT, LAT), axis=0))
+        np.testing.assert_array_equal(zlon.values, np.concatenate((LON, LON), axis=0))
+
+    @staticmethod
+    def test_attributes_with_swath_definition(input_file):
+        """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour)."""
+        bufr_obj = SeviriL2BufrData(input_file)
+        z = bufr_obj.get_data(DATASET_INFO)
+        assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name']
+        assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon']
+        assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size']
+
+    @staticmethod
+    def test_attributes_with_area_definition(input_file):
+        """Test correctness of dataset attributes with data loaded with an AreaDefinition."""
+        bufr_obj = SeviriL2BufrData(input_file, with_adef=True)
+        _ = bufr_obj.get_data(DATASET_INFO_LAT)  # We need to load the lat/lon data in order to
+        _ = bufr_obj.get_data(DATASET_INFO_LON)  # populate the file handler with these data
+        z = bufr_obj.get_data(DATASET_INFO)
+        assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name']
+        assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon']
+        assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size']
+
+    @staticmethod
+    def test_data_with_swath_definition(input_file):
+        """Test data loaded with SwathDefinition (default behaviour)."""
+        bufr_obj = SeviriL2BufrData(input_file)
+        with pytest.raises(NotImplementedError):
+            bufr_obj.fh.get_area_def(None)
+
+        # concatenate original test arrays as get_dataset will have read and concatenated the data
+        x1 = np.concatenate((DATA, DATA), axis=0)
+        z = bufr_obj.get_data(DATASET_INFO)
+        np.testing.assert_array_equal(z.values, x1)
+
+    def test_data_with_area_definition(self, input_file):
+        """Test data loaded with AreaDefinition."""
+        bufr_obj = SeviriL2BufrData(input_file, with_adef=True)
+        _ = bufr_obj.get_data(DATASET_INFO_LAT)  # We need to load the lat/lon data in order to
+        _ = bufr_obj.get_data(DATASET_INFO_LON)  # populate the file handler with these data
+        z = bufr_obj.get_data(DATASET_INFO)
+
+        ad = bufr_obj.fh.get_area_def(None)
+        assert ad == AREA_DEF
+        data_1d = np.concatenate((DATA, DATA), axis=0)
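+        # Note: ad.get_array_indices_from_lonlat below returns masked index arrays,
+        # so samples falling outside the area extent can be dropped via the masks.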
+
+        # Put BUFR data on 2D grid that the 2D array returned by get_dataset should correspond to
+        lons_1d, lats_1d = da.compute(bufr_obj.fh.longitude, bufr_obj.fh.latitude)
+        icol, irow = ad.get_array_indices_from_lonlat(lons_1d, lats_1d)
+
+        data_2d = np.empty(ad.shape)
+        data_2d[:] = np.nan
+        data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask]
+        np.testing.assert_array_equal(z.values, data_2d)
+
+        # Test that the correct AreaDefinition is identified for products with 3 pixel segments
+        bufr_obj.fh.seg_size = 3
+        ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name='dummmy', resolution=9000))
+        assert ad_ext == AREA_DEF_EXT
+
+    def test_data_with_rect_lon(self, input_file):
+        """Test data loaded with AreaDefinition and user defined rectification longitude."""
+        bufr_obj = SeviriL2BufrData(input_file, with_adef=True, rect_lon=0.0)
+        np.testing.assert_equal(bufr_obj.fh.ssp_lon, 0.0)
+        _ = bufr_obj.get_data(DATASET_INFO_LAT)  # We need to load the lat/lon data in order to
+        _ = bufr_obj.get_data(DATASET_INFO_LON)  # populate the file handler with these data
+        _ = bufr_obj.get_data(DATASET_INFO)  # We need to load the data in order to create the AreaDefinition
+
+        ad = bufr_obj.fh.get_area_def(None)
+        assert ad == AREA_DEF_FES
diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py
index 3e7e90a455..837dbcdc06 100644
--- a/satpy/tests/reader_tests/test_seviri_l2_grib.py
+++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py
@@ -25,9 +25,11 @@

 import numpy as np

+from satpy.tests.utils import make_dataid
+
 # Dictionary to be used as fake GRIB message
 FAKE_MESSAGE = {
-    'longitudeOfSubSatellitePointInDegrees': 10.0,
+    'longitudeOfSubSatellitePointInDegrees': 9.5,
     'dataDate': 20191020,
     'dataTime': 1745,
     'Nx': 1000,
@@ -75,12 +77,38 @@ def test_data_reading(self, da_, xr_):
                 filetype_info={}
             )

-            # Checks the correct file open call
-            mock_file.assert_called_with('test.grib', 'rb')
+            dataset_id = make_dataid(name='dummmy', resolution=3000)

             # Checks that the codes_grib_multi_support_on function has been called
             self.ec_.codes_grib_multi_support_on.assert_called()

+            # Restarts the id generator and clears the call history
+            fake_gid_generator = (i for i in FAKE_GID)
+            self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
+            self.ec_.codes_grib_new_from_file.reset_mock()
+            self.ec_.codes_release.reset_mock()
+
+            # Checks the correct execution of the get_dataset function with a valid parameter_number
+            valid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 30})
+            # Checks the correct file open call
+            mock_file.assert_called_with('test.grib', 'rb')
+            # Checks that the dataset has been created as a DataArray object
+            self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()')
+            # Checks that codes_release has been called after each codes_grib_new_from_file call
+            # (except after the last one which has returned a None)
+            self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
+                             self.ec_.codes_release.call_count + 1)
+
+            # Restarts the id generator and clears the call history
+            fake_gid_generator = (i for i in FAKE_GID)
+            self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
+            self.ec_.codes_grib_new_from_file.reset_mock()
+            self.ec_.codes_release.reset_mock()
+
+            # Checks the correct execution of the get_dataset function with an invalid parameter_number
+            invalid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number':
50}) + # Checks that the function returns None + self.assertEqual(invalid_dataset, None) # Checks that codes_release has been called after each codes_grib_new_from_file call # (except after the last one which has returned a None) self.assertEqual(self.ec_.codes_grib_new_from_file.call_count, @@ -90,15 +118,15 @@ def test_data_reading(self, da_, xr_): self.assertEqual(REPEAT_CYCLE_DURATION, 15) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - global_attributes = self.reader._get_global_attributes() - expected_global_attributes = { + attributes = self.reader._get_attributes() + expected_attributes = { 'orbital_parameters': { - 'projection_longitude': 10. + 'projection_longitude': 9.5 }, 'sensor': 'seviri', 'platform_name': 'Meteosat-11' } - self.assertEqual(global_attributes, expected_global_attributes) + self.assertEqual(attributes, expected_attributes) # Checks the reading of an array from the message self.reader._get_xarray_from_msg(0) @@ -119,16 +147,16 @@ def test_data_reading(self, da_, xr_): 'a': 6400000., 'b': 6300000., 'h': 32000000., - 'ssp_lon': 10., + 'ssp_lon': 9.5, 'nlines': 1000, 'ncols': 1200, - 'a_name': 'geos_seviri', - 'a_desc': 'Calculated area for SEVIRI L2 GRIB product', - 'p_id': 'geos', + 'a_name': 'msg_seviri_rss_3km', + 'a_desc': 'MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution', + 'p_id': '', } self.assertEqual(pdict, expected_pdict) expected_area_dict = { - 'center_point': 500.5, + 'center_point': 500, 'north': 1200, 'east': 1, 'west': 1000, @@ -142,39 +170,12 @@ def test_data_reading(self, da_, xr_): with mock.patch('satpy.readers.seviri_l2_grib.get_area_definition', mock.Mock()) as gad: self.reader.get_area_def(mock.Mock(resolution=400.)) # Asserts that calculate_area_extent has been called with the correct arguments + expected_args = ({'center_point': 500, 'east': 1, 'west': 1000, 'south': 1, 'north': 1200, + 'column_step': 400., 'line_step': 400.},) name, args, kwargs = cae.mock_calls[0] - self.assertEqual(args[0]['resolution'], 400.) 
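+            # expected_args is the single positional argument passed to calculate_area_extent:
+            # the area dictionary built above, with column_step/line_step taken from the
+            # 400 m dataset resolution.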
+            self.assertEqual(args, expected_args)
             # Asserts that get_area_definition has been called with the correct arguments
             name, args, kwargs = gad.mock_calls[0]
             self.assertEqual(args[0], expected_pdict)
             # The second argument must be the return result of calculate_area_extent
             self.assertEqual(args[1]._extract_mock_name(), 'calculate_area_extent()')
-
-            # Restarts the id generator and clears the call history
-            fake_gid_generator = (i for i in FAKE_GID)
-            self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
-            self.ec_.codes_grib_new_from_file.reset_mock()
-            self.ec_.codes_release.reset_mock()
-
-            # Checks the correct execution of the get_dataset function with a valid parameter_number
-            valid_dataset = self.reader.get_dataset(None, {'parameter_number': 30})
-            # Checks that the dataset has been created as a DataArray object
-            self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()')
-            # Checks that codes_release has been called after each codes_grib_new_from_file call
-            self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
-                             self.ec_.codes_release.call_count)
-
-            # Restarts the id generator and clears the call history
-            fake_gid_generator = (i for i in FAKE_GID)
-            self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
-            self.ec_.codes_grib_new_from_file.reset_mock()
-            self.ec_.codes_release.reset_mock()
-
-            # Checks the correct execution of the get_dataset function with an invalid parameter_number
-            invalid_dataset = self.reader.get_dataset(None, {'parameter_number': 50})
-            # Checks that the function returns None
-            self.assertEqual(invalid_dataset, None)
-            # Checks that codes_release has been called after each codes_grib_new_from_file call
-            # (except after the last one which has returned a None)
-            self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
-                             self.ec_.codes_release.call_count + 1)
diff --git a/satpy/tests/reader_tests/test_slstr_l2.py b/satpy/tests/reader_tests/test_slstr_l2.py
deleted file mode 100644
index 5330a10e3e..0000000000
--- a/satpy/tests/reader_tests/test_slstr_l2.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2018 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Module for testing the satpy.readers.slstr_l2 module.""" - -import unittest -from unittest import mock -from unittest.mock import MagicMock, patch - -import xarray as xr - -from satpy.readers.slstr_l2 import SLSTRL2FileHandler - - -class TestSLSTRL2Reader(unittest.TestCase): - """Test Sentinel-3 SST L2 reader.""" - - @mock.patch('xarray.open_dataset') - def test_instantiate(self, mocked_dataset): - """Test initialization of file handlers.""" - filename_info = {} - tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None) - mocked_dataset.assert_called() - mocked_dataset.reset_mock() - - with patch('tarfile.open') as tf: - tf.return_value.__enter__.return_value = MagicMock(getnames=lambda *a: ["GHRSST-SSTskin.nc"]) - SLSTRL2FileHandler('somedir/somefile.tar', filename_info, None) - mocked_dataset.assert_called() - mocked_dataset.reset_mock() - - @mock.patch('xarray.open_dataset') - def test_get_dataset(self, mocked_dataset): - """Test retrieval of datasets.""" - filename_info = {} - tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z') - tmp.rename.return_value = tmp - xr.open_dataset.return_value = tmp - test = SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None) - test.nc = {'longitude': xr.Dataset(), - 'latitude': xr.Dataset(), - 'sea_surface_temperature': xr.Dataset(), - 'sea_ice_fraction': xr.Dataset(), - } - test.get_dataset('longitude', {'standard_name': 'longitude'}) - test.get_dataset('latitude', {'standard_name': 'latitude'}) - test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'}) - test.get_dataset('sea_ice_fraction', {'standard_name': 'sea_ice_fraction'}) - with self.assertRaises(KeyError): - test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'}) - mocked_dataset.assert_called() - mocked_dataset.reset_mock() diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py index 52e65d12f3..a6bf987517 100644 --- a/satpy/tests/reader_tests/test_utils.py +++ b/satpy/tests/reader_tests/test_utils.py @@ -27,8 +27,10 @@ import numpy.testing import pyresample.geometry import xarray as xr +from fsspec.implementations.memory import MemoryFile, MemoryFileSystem from pyproj import CRS +from satpy.readers import FSFile from satpy.readers import utils as hf @@ -299,6 +301,86 @@ def test_unzip_file_pbzip2(self, mock_popen, mock_bz2): new_fname = hf.unzip_file(filename) self.assertIsNone(new_fname) + @mock.patch('bz2.BZ2File') + def test_generic_open_BZ2File(self, bz2_mock): + """Test the generic_open method with bz2 filename input.""" + mock_bz2_open = mock.MagicMock() + mock_bz2_open.read.return_value = b'TEST' + bz2_mock.return_value = mock_bz2_open + + filename = 'tester.DAT.bz2' + with hf.generic_open(filename) as file_object: + data = file_object.read() + assert data == b'TEST' + + assert mock_bz2_open.read.called + + def test_generic_open_FSFile_MemoryFileSystem(self): + """Test the generic_open method with FSFile in MemoryFileSystem.""" + mem_fs = MemoryFileSystem() + mem_file = MemoryFile(fs=mem_fs, path="{}test.DAT".format(mem_fs.root_marker), data=b"TEST") + mem_file.commit() + fsf = FSFile(mem_file) + with hf.generic_open(fsf) as file_object: + data = file_object.read() + assert data == b'TEST' + + @mock.patch('satpy.readers.utils.open') + def test_generic_open_filename(self, open_mock): + """Test the 
generic_open method with filename (str)."""
+        mock_fn_open = mock.MagicMock()
+        mock_fn_open.read.return_value = b'TEST'
+        open_mock.return_value = mock_fn_open
+
+        filename = "test.DAT"
+        with hf.generic_open(filename) as file_object:
+            data = file_object.read()
+            assert data == b'TEST'
+
+        assert mock_fn_open.read.called
+
+    def test_generic_open_text(self):
+        """Test the generic_open context manager on dummy text data, both plain and bz2-compressed."""
+        dummy_text_data = 'Hello'
+        dummy_text_filename = 'dummy.txt'
+        with open(dummy_text_filename, 'w') as f:
+            f.write(dummy_text_data)
+
+        with hf.generic_open(dummy_text_filename, 'r') as f:
+            read_text_data = f.read()
+
+        assert read_text_data == dummy_text_data
+
+        dummy_text_filename = 'dummy.txt.bz2'
+        with hf.bz2.open(dummy_text_filename, 'wt') as f:
+            f.write(dummy_text_data)
+
+        with hf.generic_open(dummy_text_filename, 'rt') as f:
+            read_text_data = f.read()
+
+        assert read_text_data == dummy_text_data
+
+    def test_generic_open_binary(self):
+        """Test the generic_open context manager on dummy binary data, both plain and bz2-compressed."""
+        dummy_binary_data = b'Hello'
+        dummy_binary_filename = 'dummy.dat'
+        with open(dummy_binary_filename, 'wb') as f:
+            f.write(dummy_binary_data)
+
+        with hf.generic_open(dummy_binary_filename, 'rb') as f:
+            read_binary_data = f.read()
+
+        assert read_binary_data == dummy_binary_data
+
+        dummy_binary_filename = 'dummy.dat.bz2'
+        with hf.bz2.open(dummy_binary_filename, 'wb') as f:
+            f.write(dummy_binary_data)
+
+        with hf.generic_open(dummy_binary_filename, 'rb') as f:
+            read_binary_data = f.read()
+
+        assert read_binary_data == dummy_binary_data
+
     @mock.patch("os.remove")
     @mock.patch("satpy.readers.utils.unzip_file", return_value='dummy.txt')
     def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove):
diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py
index 9836168b97..cf33e7872e 100644
--- a/satpy/tests/reader_tests/test_vii_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py
@@ -15,8 +15,12 @@
 #
 # You should have received a copy of the GNU General Public License
 # along with satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The vii_l1b_nc reader tests package.
+
+This version tests the readers for VII test data V2 as per PFS V4A.
+ +""" -"""The vii_l1b_nc reader tests package.""" import datetime import os @@ -75,9 +79,9 @@ def setUp(self): # Add variables to data/measurement_data group sza = g1_2.createVariable('solar_zenith', np.float32, - dimensions=('num_tie_points_act', 'num_tie_points_alt')) + dimensions=('num_tie_points_alt', 'num_tie_points_act')) sza[:] = 25.0 - delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_pixels', 'num_lines')) + delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels')) delta_lat[:] = 1.0 self.reader = ViiL1bNCFileHandler( @@ -116,25 +120,25 @@ def test_calibration_functions(self): angle_factor = 0.4 isi = 2.0 refl = self.reader._calibrate_refl(radiance, angle_factor, isi) - expected_refl = np.array([[0.628318531, 1.256637061, 3.141592654], - [4.398229715, 6.283185307, 12.56637061]]) + expected_refl = np.array([[62.8318531, 125.6637061, 314.1592654], + [439.8229715, 628.3185307, 1256.637061]]) self.assertTrue(np.allclose(refl, expected_refl)) def test_functions(self): """Test the functions.""" # Checks that the _perform_orthorectification function is correctly executed variable = xr.DataArray( - dims=('num_pixels', 'num_lines'), + dims=('num_lines', 'num_pixels'), name='test_name', attrs={ 'key_1': 'value_1', 'key_2': 'value_2' }, - data=da.from_array(np.ones((72, 600))) + data=da.from_array(np.ones((600, 72))) ) orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat') - expected_values = np.degrees(np.ones((72, 600)) / MEAN_EARTH_RADIUS) + np.ones((72, 600)) + expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72)) self.assertTrue(np.allclose(orthorect_variable.values, expected_values)) # Checks that the _perform_calibration function is correctly executed in all cases @@ -151,7 +155,7 @@ def test_functions(self): calibrated_variable = self.reader._perform_calibration(variable, {'calibration': 'brightness_temperature', 'chan_thermal_index': 3}) - expected_values = np.ones((72, 600)) * 302007.42728603 + expected_values = np.full((600, 72), 1101.10413712) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) # reflectance calibration: checks that the return value is correct @@ -159,5 +163,5 @@ def test_functions(self): {'calibration': 'reflectance', 'wavelength': [0.658, 0.668, 0.678], 'chan_solar_index': 2}) - expected_values = np.ones((72, 600)) * 1.733181982 * (0.678 - 0.658) + expected_values = np.full((600, 72), 173.3181982) self.assertTrue(np.allclose(calibrated_variable.values, expected_values)) diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py index 0afa6c9da1..fe8f788a7a 100644 --- a/satpy/tests/reader_tests/test_viirs_compact.py +++ b/satpy/tests/reader_tests/test_viirs_compact.py @@ -25,6 +25,8 @@ import h5py import numpy as np +from satpy.tests.reader_tests.utils import fill_h5 + class TestCompact(unittest.TestCase): """Test class for reading compact viirs format.""" @@ -2419,19 +2421,6 @@ def setUp(self): ) h5f = h5py.File(self.filename, mode="w") - def fill_h5(root, stuff): - for key, val in stuff.items(): - if key in ["value", "attrs"]: - continue - if "value" in val: - root[key] = val["value"] - else: - grp = root.create_group(key) - fill_h5(grp, stuff[key]) - if "attrs" in val: - for attrs, val in val["attrs"].items(): - root[key].attrs[attrs] = val - fill_h5(h5f, fake_dnb) for attr, val in fake_dnb["attrs"].items(): h5f.attrs[attr] = val diff --git 
a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py index af6f6d2cef..c2edab2724 100644 --- a/satpy/tests/reader_tests/test_viirs_l1b.py +++ b/satpy/tests/reader_tests/test_viirs_l1b.py @@ -18,7 +18,6 @@ """Module for testing the satpy.readers.viirs_l1b module.""" import os -import unittest from datetime import datetime, timedelta from unittest import mock @@ -38,25 +37,20 @@ DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0) -class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler): +class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler): """Swap-in NetCDF4 File Handler.""" + M_REFL_BANDS = [f"M{band_num:02d}" for band_num in range(1, 12)] + M_BT_BANDS = [f"M{band_num:02d}" for band_num in range(12, 17)] + M_BANDS = M_REFL_BANDS + M_BT_BANDS + I_REFL_BANDS = [f"I{band_num:02d}" for band_num in range(1, 4)] + I_BT_BANDS = [f"I{band_num:02d}" for band_num in range(4, 6)] + I_BANDS = I_REFL_BANDS + I_BT_BANDS + def get_test_content(self, filename, filename_info, filetype_info): """Mimic reader input file content.""" dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0)) file_type = filename[:5].lower() - # num_lines = { - # 'vl1bi': 3248 * 2, - # 'vl1bm': 3248, - # 'vl1bd': 3248, - # }[file_type] - # num_pixels = { - # 'vl1bi': 6400, - # 'vl1bm': 3200, - # 'vl1bd': 4064, - # }[file_type] - # num_scans = 203 - # num_luts = 65536 num_lines = DEFAULT_FILE_SHAPE[0] num_pixels = DEFAULT_FILE_SHAPE[1] num_scans = 5 @@ -69,18 +63,15 @@ def get_test_content(self, filename, filename_info, filetype_info): '/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'), '/attr/orbit_number': 26384, - '/attr/instrument': 'viirs', + '/attr/instrument': 'VIIRS', '/attr/platform': 'Suomi-NPP', } self._fill_contents_with_default_data(file_content, file_type) - self._set_dataset_specific_metadata(file_content) - convert_file_content_to_data_array(file_content) return file_content - @staticmethod - def _fill_contents_with_default_data(file_content, file_type): + def _fill_contents_with_default_data(self, file_content, file_type): """Fill file contents with default data.""" if file_type.startswith('vgeo'): file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number') @@ -94,28 +85,11 @@ def _fill_contents_with_default_data(file_content, file_type): file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA elif file_type == 'vl1bm': - file_content['observation_data/M01'] = DEFAULT_FILE_DATA - file_content['observation_data/M02'] = DEFAULT_FILE_DATA - file_content['observation_data/M03'] = DEFAULT_FILE_DATA - file_content['observation_data/M04'] = DEFAULT_FILE_DATA - file_content['observation_data/M05'] = DEFAULT_FILE_DATA - file_content['observation_data/M06'] = DEFAULT_FILE_DATA - file_content['observation_data/M07'] = DEFAULT_FILE_DATA - file_content['observation_data/M08'] = DEFAULT_FILE_DATA - file_content['observation_data/M09'] = DEFAULT_FILE_DATA - file_content['observation_data/M10'] = DEFAULT_FILE_DATA - file_content['observation_data/M11'] = DEFAULT_FILE_DATA - file_content['observation_data/M12'] = DEFAULT_FILE_DATA - file_content['observation_data/M13'] = DEFAULT_FILE_DATA - file_content['observation_data/M14'] = DEFAULT_FILE_DATA - file_content['observation_data/M15'] = DEFAULT_FILE_DATA - file_content['observation_data/M16'] = 
DEFAULT_FILE_DATA + for m_band in self.M_BANDS: + file_content[f'observation_data/{m_band}'] = DEFAULT_FILE_DATA elif file_type == 'vl1bi': - file_content['observation_data/I01'] = DEFAULT_FILE_DATA - file_content['observation_data/I02'] = DEFAULT_FILE_DATA - file_content['observation_data/I03'] = DEFAULT_FILE_DATA - file_content['observation_data/I04'] = DEFAULT_FILE_DATA - file_content['observation_data/I05'] = DEFAULT_FILE_DATA + for i_band in self.I_BANDS: + file_content[f'observation_data/{i_band}'] = DEFAULT_FILE_DATA elif file_type == 'vl1bd': file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian' @@ -152,22 +126,36 @@ def _set_dataset_specific_metadata(file_content): file_content[k + '/attr/add_offset'] = 0.1 -class TestVIIRSL1BReader(unittest.TestCase): +class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay): + """Same as the day file handler, but some day-only bands are missing. + + This matches what happens in real world files where reflectance bands + are removed in night data to save space. + + """ + + M_BANDS = FakeNetCDF4FileHandlerDay.M_BT_BANDS + I_BANDS = FakeNetCDF4FileHandlerDay.I_BT_BANDS + + +class TestVIIRSL1BReaderDay: """Test VIIRS L1B Reader.""" yaml_file = "viirs_l1b.yaml" + fake_cls = FakeNetCDF4FileHandlerDay + has_reflectance_bands = True - def setUp(self): + def setup_method(self): """Wrap NetCDF4 file handler with our own fake handler.""" from satpy._config import config_search_paths from satpy.readers.viirs_l1b import VIIRSL1BFileHandler self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file)) # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library - self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (FakeNetCDF4FileHandler2,)) + self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (self.fake_cls,)) self.fake_handler = self.p.start() self.p.is_local = True - def tearDown(self): + def teardown_method(self): """Stop wrapping the NetCDF4 file handler.""" self.p.stop() @@ -178,10 +166,24 @@ def test_init(self): loadables = r.select_files_from_pathnames([ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', ]) - self.assertEqual(len(loadables), 1) + assert len(loadables) == 1 r.create_filehandlers(loadables) # make sure we have some files - self.assertTrue(r.file_handlers) + assert r.file_handlers + + def test_available_datasets_m_bands(self): + """Test available datasets for M band files.""" + from satpy.readers import load_reader + r = load_reader(self.reader_configs) + loadables = r.select_files_from_pathnames([ + 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc', + 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc', + ]) + r.create_filehandlers(loadables) + avail_names = r.available_dataset_names + angles = {"satellite_azimuth_angle", "satellite_zenith_angle", "solar_azimuth_angle", "solar_zenith_angle"} + geo = {"m_lon", "m_lat"} + assert set(avail_names) == set(self.fake_cls.M_BANDS) | angles | geo def test_load_every_m_band_bt(self): """Test loading all M band brightness temperatures.""" @@ -197,13 +199,14 @@ def test_load_every_m_band_bt(self): 'M14', 'M15', 'M16']) - self.assertEqual(len(datasets), 5) + assert len(datasets) == 5 for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'brightness_temperature') - self.assertEqual(v.attrs['units'], 'K') - self.assertEqual(v.attrs['rows_per_scan'], 2) - 
self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) + assert v.attrs['calibration'] == 'brightness_temperature' + assert v.attrs['units'] == 'K' + assert v.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 + assert v.attrs['sensor'] == "viirs" def test_load_every_m_band_refl(self): """Test loading all M band reflectances.""" @@ -225,13 +228,14 @@ def test_load_every_m_band_refl(self): 'M09', 'M10', 'M11']) - self.assertEqual(len(datasets), 11) + assert len(datasets) == (11 if self.has_reflectance_bands else 0) for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'reflectance') - self.assertEqual(v.attrs['units'], '%') - self.assertEqual(v.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) + assert v.attrs['calibration'] == 'reflectance' + assert v.attrs['units'] == '%' + assert v.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 + assert v.attrs['sensor'] == "viirs" def test_load_every_m_band_rad(self): """Test loading all M bands as radiances.""" @@ -259,13 +263,14 @@ def test_load_every_m_band_rad(self): make_dataid(name='M14', calibration='radiance'), make_dataid(name='M15', calibration='radiance'), make_dataid(name='M16', calibration='radiance')]) - self.assertEqual(len(datasets), 16) + assert len(datasets) == (16 if self.has_reflectance_bands else 5) for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'radiance') - self.assertEqual(v.attrs['units'], 'W m-2 um-1 sr-1') - self.assertEqual(v.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) + assert v.attrs['calibration'] == 'radiance' + assert v.attrs['units'] == 'W m-2 um-1 sr-1' + assert v.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 + assert v.attrs['sensor'] == "viirs" def test_load_dnb_radiance(self): """Test loading the main DNB dataset.""" @@ -277,13 +282,14 @@ def test_load_dnb_radiance(self): ]) r.create_filehandlers(loadables) datasets = r.load(['DNB']) - self.assertEqual(len(datasets), 1) + assert len(datasets) == 1 for v in datasets.values(): - self.assertEqual(v.attrs['calibration'], 'radiance') - self.assertEqual(v.attrs['units'], 'W m-2 sr-1') - self.assertEqual(v.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) + assert v.attrs['calibration'] == 'radiance' + assert v.attrs['units'] == 'W m-2 sr-1' + assert v.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 + assert v.attrs['sensor'] == "viirs" def test_load_dnb_angles(self): """Test loading all DNB angle datasets.""" @@ -301,9 +307,21 @@ def test_load_dnb_angles(self): 'dnb_lunar_zenith_angle', 'dnb_lunar_azimuth_angle', ]) - self.assertEqual(len(datasets), 6) + assert len(datasets) == 6 for v in datasets.values(): - self.assertEqual(v.attrs['units'], 'degrees') - self.assertEqual(v.attrs['rows_per_scan'], 2) - 
self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2) - self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2) + assert v.attrs['units'] == 'degrees' + assert v.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2 + assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2 + assert v.attrs['sensor'] == "viirs" + + +class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay): + """Test VIIRS L1b with night data. + + Night data files don't have reflectance bands in them. + + """ + + fake_cls = FakeNetCDF4FileHandlerNight + has_reflectance_bands = False diff --git a/satpy/tests/reader_tests/utils.py b/satpy/tests/reader_tests/utils.py new file mode 100644 index 0000000000..dd5b09c86a --- /dev/null +++ b/satpy/tests/reader_tests/utils.py @@ -0,0 +1,45 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>. +"""Utilities for reader tests.""" + + +def default_attr_processor(root, attr): + """Do not change the attribute.""" + return attr + + +def fill_h5(root, contents, attr_processor=default_attr_processor): + """Fill hdf5 file with the given contents. + + Args: + root: hdf5 file root + contents: Contents to be written into the file + attr_processor: A method for modifying attributes before they are + written to the file. + """ + for key, val in contents.items(): + if key in ["value", "attrs"]: + continue + if "value" in val: + root[key] = val["value"] + else: + grp = root.create_group(key) + fill_h5(grp, contents[key]) + if "attrs" in val: + for attr_name, attr_val in val["attrs"].items(): + root[key].attrs[attr_name] = attr_processor(root, attr_val) diff --git a/satpy/tests/test_compat.py b/satpy/tests/test_compat.py new file mode 100644 index 0000000000..f084f88e53 --- /dev/null +++ b/satpy/tests/test_compat.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python # -*- coding: utf-8 -*- +# Copyright (c) 2022 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>.
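For orientation, a minimal usage sketch of the fill_h5 helper introduced above; the file name and the contents dict here are illustrative only, mirroring the nested value/attrs layout that test_viirs_compact.py builds:

import h5py
import numpy as np

from satpy.tests.reader_tests.utils import fill_h5

# Illustrative contents: a child dict without a "value" key becomes a group,
# one with a "value" key becomes a dataset; "attrs" are attached to either.
contents = {
    "All_Data": {
        "Radiances": {
            "value": np.zeros((3, 3), dtype=np.float32),
            "attrs": {"Units": "W m-2 sr-1"},
        },
    },
}

with h5py.File("fake_file.h5", mode="w") as h5f:
    fill_h5(h5f, contents)
    # A top-level "attrs" key is skipped by fill_h5; callers set root
    # attributes themselves, as the VIIRS compact test does after calling it.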
+"""Test backports and compatibility fixes.""" + +import gc + +from satpy._compat import CachedPropertyBackport + + +class ClassWithCachedProperty: # noqa + def __init__(self, x): # noqa + self.x = x + + @CachedPropertyBackport + def property(self): # noqa + return 2 * self.x + + +def test_cached_property_backport(): + """Test cached property backport.""" + c = ClassWithCachedProperty(1) + assert c.property == 2 + + +def test_cached_property_backport_releases_memory(): + """Test that cached property backport releases memory.""" + c1 = ClassWithCachedProperty(2) + del c1 + instances = [ + obj for obj in gc.get_objects() + if isinstance(obj, ClassWithCachedProperty) + ] + assert len(instances) == 0 diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 0f14056466..8aa0b32c45 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -27,6 +27,7 @@ import numpy as np import pytest import xarray as xr +from pyresample import AreaDefinition class TestMatchDataArrays(unittest.TestCase): @@ -135,6 +136,8 @@ def setUp(self): 'start_time': datetime(2018, 1, 1, 18), 'modifiers': tuple(), 'resolution': 1000, + 'calibration': 'reflectance', + 'units': '%', 'name': 'test_vis'} ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), attrs=attrs, dims=('y', 'x'), @@ -229,6 +232,13 @@ def test_self_sharpened_basic(self): np.testing.assert_allclose(res[1], np.array([[3, 3], [3, 3]], dtype=np.float64)) np.testing.assert_allclose(res[2], np.array([[4, 4], [4, 4]], dtype=np.float64)) + def test_no_units(self): + """Test that the computed RGB has no units attribute.""" + from satpy.composites import RatioSharpenedRGB + comp = RatioSharpenedRGB(name='true_color') + res = comp((self.ds1, self.ds2, self.ds3)) + assert "units" not in res.attrs + class TestDifferenceCompositor(unittest.TestCase): """Test case for the difference compositor.""" @@ -269,8 +279,8 @@ def setUp(self): def test_basic_diff(self): """Test that a basic difference composite works.""" from satpy.composites import DifferenceCompositor - comp = DifferenceCompositor(name='diff') - res = comp((self.ds1, self.ds2), standard_name='temperature_difference') + comp = DifferenceCompositor(name='diff', standard_name='temperature_difference') + res = comp((self.ds1, self.ds2)) np.testing.assert_allclose(res.values, -2) assert res.attrs.get('standard_name') == 'temperature_difference' @@ -315,12 +325,12 @@ def setUp(self): self.sza = xr.DataArray(sza, dims=('y', 'x')) # fake area - my_area = mock.MagicMock() - lons = np.array([[-95., -94.], [-93., -92.]]) - lons = da.from_array(lons, lons.shape) - lats = np.array([[40., 41.], [42., 43.]]) - lats = da.from_array(lats, lats.shape) - my_area.get_lonlats.return_value = (lons, lats) + my_area = AreaDefinition( + "test", "", "", + "+proj=longlat", + 2, 2, + (-95.0, 40.0, -92.0, 43.0), + ) self.data_a.attrs['area'] = my_area self.data_b.attrs['area'] = my_area # not used except to check that it matches the data arrays @@ -448,16 +458,25 @@ def test_compositor(self): np.testing.assert_allclose(res.data, 0.0, atol=1e-9) -class TestSandwichCompositor(unittest.TestCase): +class TestSandwichCompositor: """Test sandwich compositor.""" + # Test RGB and RGBA + @pytest.mark.parametrize( + "input_shape,bands", + [ + ((3, 2, 2), ['R', 'G', 'B']), + ((4, 2, 2), ['R', 'G', 'B', 'A']) + ] + ) @mock.patch('satpy.composites.enhance2dataset') - def test_compositor(self, e2d): + def test_compositor(self, e2d, input_shape, bands): """Test luminance sharpening 
compositor.""" from satpy.composites import SandwichCompositor - rgb_arr = da.from_array(np.random.random((3, 2, 2)), chunks=2) - rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x']) + rgb_arr = da.from_array(np.random.random(input_shape), chunks=2) + rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x'], + coords={'bands': bands}) lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2) lum = xr.DataArray(lum_arr, dims=['y', 'x']) @@ -467,9 +486,15 @@ def test_compositor(self, e2d): res = comp([lum, rgb]) - for i in range(3): - np.testing.assert_allclose(res.data[i, :, :], - rgb_arr[i, :, :] * lum_arr / 100.) + for band in rgb: + if band.bands != 'A': + # Check compositor has modified this band + np.testing.assert_allclose(res.loc[band.bands].to_numpy(), + band.to_numpy() * lum_arr / 100.) + else: + # Check Alpha band remains intact + np.testing.assert_allclose(res.loc[band.bands].to_numpy(), + band.to_numpy()) # make sure the compositor doesn't modify the input data np.testing.assert_allclose(lum.values, lum_arr.compute()) @@ -1164,15 +1189,72 @@ def test_multiple_sensors(self): assert res.attrs['sensor'] == {'abi', 'glm'} -class TestMaskingCompositor(unittest.TestCase): +class TestMaskingCompositor: """Test case for the simple masking compositor.""" + @pytest.fixture + def conditions_v1(self): + """Masking conditions with string values.""" + return [{'method': 'equal', + 'value': 'Cloud-free_land', + 'transparency': 100}, + {'method': 'equal', + 'value': 'Cloud-free_sea', + 'transparency': 50}] + + @pytest.fixture + def conditions_v2(self): + """Masking conditions with numerical values.""" + return [{'method': 'equal', + 'value': 1, + 'transparency': 100}, + {'method': 'equal', + 'value': 2, + 'transparency': 50}] + + @pytest.fixture + def test_data(self): + """Test data to use with masking compositors.""" + return xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) + + @pytest.fixture + def test_ct_data(self): + """Test 2D CT data array.""" + flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] + flag_values = da.array([1, 2]) + ct_data = da.array([[1, 2, 2], + [2, 1, 2], + [2, 2, 1]]) + ct_data = xr.DataArray(ct_data, dims=['y', 'x']) + ct_data.attrs['flag_meanings'] = flag_meanings + ct_data.attrs['flag_values'] = flag_values + return ct_data + + @pytest.fixture + def test_ct_data_v3(self, test_ct_data): + """Set ct data to NaN where it originally is 1.""" + return test_ct_data.where(test_ct_data == 1) + + @pytest.fixture + def reference_data(self, test_data, test_ct_data): + """Get reference data to use in masking compositor tests.""" + # The data are set to NaN where ct is `1` + return test_data.where(test_ct_data > 1) + + @pytest.fixture + def reference_alpha(self): + """Get reference alpha to use in masking compositor tests.""" + ref_alpha = da.array([[0, 0.5, 0.5], + [0.5, 0, 0.5], + [0.5, 0.5, 0]]) + return xr.DataArray(ref_alpha, dims=['y', 'x']) + def test_init(self): """Test the initializiation of compositor.""" from satpy.composites import MaskingCompositor # No transparency or conditions given raises ValueError - with self.assertRaises(ValueError): + with pytest.raises(ValueError): comp = MaskingCompositor("name") # transparency defined @@ -1209,102 +1291,94 @@ def test_get_flag_value(self): assert _get_flag_value(mask, 'Cloud-free_land') == 1 assert _get_flag_value(mask, 'Cloud-free_sea') == 2 - def test_call(self): - """Test call the compositor.""" + @pytest.mark.parametrize("mode", ["LA", "RGBA"]) + def test_call_numerical_transparency_data( + self, 
conditions_v1, test_data, test_ct_data, reference_data, + reference_alpha, mode): + """Test calling the compositor with numerical transparency data. + + Use parameterisation to test different image modes. + """ from satpy.composites import MaskingCompositor from satpy.tests.utils import CustomScheduler - flag_meanings = ['Cloud-free_land', 'Cloud-free_sea'] - flag_meanings_str = 'Cloud-free_land Cloud-free_sea' - flag_values = da.array([1, 2]) - conditions_v1 = [{'method': 'equal', - 'value': 'Cloud-free_land', - 'transparency': 100}, - {'method': 'equal', - 'value': 'Cloud-free_sea', - 'transparency': 50}] - conditions_v2 = [{'method': 'equal', - 'value': 1, - 'transparency': 100}, - {'method': 'equal', - 'value': 2, - 'transparency': 50}] - conditions_v3 = [{'method': 'isnan', - 'transparency': 100}] - conditions_v4 = [{'method': 'absolute_import', - 'transparency': 'satpy.resample'}] - - # 2D data array - data = xr.DataArray(da.random.random((3, 3)), dims=['y', 'x']) - - # 2D CT data array - ct_data = da.array([[1, 2, 2], - [2, 1, 2], - [2, 2, 1]]) - ct_data = xr.DataArray(ct_data, dims=['y', 'x']) - ct_data.attrs['flag_meanings'] = flag_meanings - ct_data.attrs['flag_values'] = flag_values - - reference_alpha = da.array([[0, 0.5, 0.5], - [0.5, 0, 0.5], - [0.5, 0.5, 0]]) - reference_alpha = xr.DataArray(reference_alpha, dims=['y', 'x']) - # The data are set to NaN where ct is `1` - reference_data = data.where(ct_data > 1) - - reference_alpha_v3 = da.array([[1., 0., 0.], - [0., 1., 0.], - [0., 0., 1.]]) - reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x']) - # The data are set to NaN where ct is NaN - reference_data_v3 = data.where(ct_data == 1) - # Test with numerical transparency data with dask.config.set(scheduler=CustomScheduler(max_computes=0)): - comp = MaskingCompositor("name", conditions=conditions_v1) - res = comp([data, ct_data]) - self.assertEqual(res.mode, 'LA') - np.testing.assert_allclose(res.sel(bands='L'), reference_data) + comp = MaskingCompositor("name", conditions=conditions_v1, + mode=mode) + res = comp([test_data, test_ct_data]) + assert res.mode == mode + for m in mode.rstrip("A"): + np.testing.assert_allclose(res.sel(bands=m), reference_data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) - # Test with named fields + def test_call_named_fields(self, conditions_v2, test_data, test_ct_data, + reference_data, reference_alpha): + """Test with named fields.""" + from satpy.composites import MaskingCompositor + from satpy.tests.utils import CustomScheduler + with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) - res = comp([data, ct_data]) - self.assertEqual(res.mode, 'LA') + res = comp([test_data, test_ct_data]) + assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands='L'), reference_data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) - # Test with named fields which are as a string in the mask attributes - ct_data.attrs['flag_meanings'] = flag_meanings_str + def test_call_named_fields_string( + self, conditions_v2, test_data, test_ct_data, reference_data, + reference_alpha): + """Test with named fields given as a string in the mask attributes.""" + from satpy.composites import MaskingCompositor + from satpy.tests.utils import CustomScheduler + + flag_meanings_str = 'Cloud-free_land Cloud-free_sea' + test_ct_data.attrs['flag_meanings'] = flag_meanings_str with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp =
MaskingCompositor("name", conditions=conditions_v2) - res = comp([data, ct_data]) - self.assertEqual(res.mode, 'LA') + res = comp([test_data, test_ct_data]) + assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands='L'), reference_data) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) - # Test "isnan" as method - # Set ct data to NaN where it originally is 1 - ct_data_v3 = ct_data.where(ct_data == 1) + def test_method_isnan(self, test_data, + test_ct_data, test_ct_data_v3): + """Test "isnan" as method.""" + from satpy.composites import MaskingCompositor + from satpy.tests.utils import CustomScheduler + + conditions_v3 = [{'method': 'isnan', 'transparency': 100}] + + # The data are set to NaN where ct is NaN + reference_data_v3 = test_data.where(test_ct_data == 1) + reference_alpha_v3 = da.array([[1., 0., 0.], + [0., 1., 0.], + [0., 0., 1.]]) + reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x']) with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v3) - res = comp([data, ct_data_v3]) - self.assertEqual(res.mode, 'LA') + res = comp([test_data, test_ct_data_v3]) + assert res.mode == "LA" np.testing.assert_allclose(res.sel(bands='L'), reference_data_v3) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha_v3) - # Test "absolute_import" as method + def test_method_absolute_import(self, test_data, test_ct_data_v3): + """Test "absolute_import" as method.""" + from satpy.composites import MaskingCompositor + from satpy.tests.utils import CustomScheduler + + conditions_v4 = [{'method': 'absolute_import', 'transparency': 'satpy.resample'}] # This should raise AttributeError with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v4) - try: - res = comp([data, ct_data_v3]) - raise ValueError("Tried to use 'np.absolute_import'") - except AttributeError: - pass + with pytest.raises(AttributeError): + comp([test_data, test_ct_data_v3]) + + def test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha): + """Test RGB dataset.""" + from satpy.composites import MaskingCompositor + from satpy.tests.utils import CustomScheduler - # Test RGB dataset # 3D data array data = xr.DataArray(da.random.random((3, 3, 3)), dims=['bands', 'y', 'x'], @@ -1314,17 +1388,20 @@ def test_call(self): with dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v1) - res = comp([data, ct_data]) - self.assertEqual(res.mode, 'RGBA') + res = comp([data, test_ct_data]) + assert res.mode == "RGBA" np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(ct_data > 1)) + data.sel(bands='R').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(ct_data > 1)) + data.sel(bands='G').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(ct_data > 1)) + data.sel(bands='B').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) - # Test RGBA dataset + def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha): + """Test RGBA dataset.""" + from satpy.composites import MaskingCompositor + from satpy.tests.utils import CustomScheduler data = xr.DataArray(da.random.random((4, 3, 3)), dims=['bands', 'y', 'x'], coords={'bands': ['R', 'G', 'B', 'A'], @@ -1333,26 +1410,36 @@ def test_call(self): with 
dask.config.set(scheduler=CustomScheduler(max_computes=0)): comp = MaskingCompositor("name", conditions=conditions_v2) - res = comp([data, ct_data]) - self.assertEqual(res.mode, 'RGBA') + res = comp([data, test_ct_data]) + assert res.mode == "RGBA" np.testing.assert_allclose(res.sel(bands='R'), - data.sel(bands='R').where(ct_data > 1)) + data.sel(bands='R').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='G'), - data.sel(bands='G').where(ct_data > 1)) + data.sel(bands='G').where(test_ct_data > 1)) np.testing.assert_allclose(res.sel(bands='B'), - data.sel(bands='B').where(ct_data > 1)) + data.sel(bands='B').where(test_ct_data > 1)) # The compositor should drop the original alpha band np.testing.assert_allclose(res.sel(bands='A'), reference_alpha) - # incorrect method + def test_incorrect_method(self, test_data, test_ct_data): + """Test incorrect method.""" + from satpy.composites import MaskingCompositor conditions = [{'method': 'foo', 'value': 0, 'transparency': 100}] comp = MaskingCompositor("name", conditions=conditions) - with self.assertRaises(AttributeError): - res = comp([data, ct_data]) + with pytest.raises(AttributeError): + comp([test_data, test_ct_data]) + # Test with too few projectables. + with pytest.raises(ValueError): + comp([test_data]) + + def test_incorrect_mode(self, conditions_v1): + """Test initiating with unsupported mode.""" + from satpy.composites import MaskingCompositor - # too few projectables - with self.assertRaises(ValueError): - res = comp([data]) + # Incorrect mode raises ValueError + with pytest.raises(ValueError): + MaskingCompositor("name", conditions=conditions_v1, + mode="YCbCrA") class TestNaturalEnhCompositor(unittest.TestCase): diff --git a/satpy/tests/test_crefl_utils.py b/satpy/tests/test_crefl_utils.py index 96a91b578f..1e5da8cd9a 100644 --- a/satpy/tests/test_crefl_utils.py +++ b/satpy/tests/test_crefl_utils.py @@ -26,11 +26,13 @@ def test_get_atm_variables_abi(self): """Test getting atmospheric variables for ABI.""" import numpy as np - from satpy.modifiers._crefl_utils import get_atm_variables_abi - sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(0.17690244, 6.123234e-17, 530.61332168, 405., - 21.71342113, 77.14385758, 56.214566960, - 0.0043149700000000004, 0.0037296, - 0.014107995000000002, 0.052349) + from satpy.modifiers._crefl_utils import _ABIAtmosphereVariables + atm_vars = _ABIAtmosphereVariables( + 21.71342113, 77.14385758, 56.214566960, + 0.17690244, 6.123234e-17, 530.61332168, 405., + 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349, + ) + sphalb, rhoray, TtotraytH2O, tOG = atm_vars() self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10) self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10) self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10) diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py index 09da5c02a6..3cbc937b3b 100644 --- a/satpy/tests/test_dataset.py +++ b/satpy/tests/test_dataset.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2015-2019 Satpy developers +# Copyright (c) 2015-2021 Satpy developers # # This file is part of satpy. 
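As a reading aid for the masking-compositor fixtures and tests above: each condition maps a category value to a transparency, and the expected alpha channel is simply one minus transparency/100. A standalone NumPy sketch (not satpy code) reproduces the reference_alpha fixture:

import numpy as np

# Per the fixtures: category 1 is 100% transparent, category 2 is 50%.
transparency = {1: 100, 2: 50}
ct = np.array([[1, 2, 2],
               [2, 1, 2],
               [2, 2, 1]])

alpha = np.ones(ct.shape)
for value, trans in transparency.items():
    alpha[ct == value] = 1.0 - trans / 100.0  # transparency 100 -> alpha 0

expected = np.array([[0.0, 0.5, 0.5],
                     [0.5, 0.0, 0.5],
                     [0.5, 0.5, 0.0]])
np.testing.assert_allclose(alpha, expected)  # matches reference_alpha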
# @@ -142,9 +142,9 @@ def test_combine_arrays(self): from satpy.dataset.metadata import combine_metadata dts = [ - {"quality": (arange(25) % 2).reshape(5, 5).astype("?")}, - {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")}, - {"quality": ones((5, 5,), "?")}, + {"quality": (arange(25) % 2).reshape(5, 5).astype("?")}, + {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")}, + {"quality": ones((5, 5,), "?")}, ] assert "quality" not in combine_metadata(*dts) dts2 = [{"quality": DataArray(d["quality"])} for d in dts] @@ -154,22 +154,22 @@ def test_combine_arrays(self): assert "quality" not in combine_metadata(*dts3) # check cases with repeated arrays dts4 = [ - {"quality": dts[0]["quality"]}, - {"quality": dts[0]["quality"]}, - ] + {"quality": dts[0]["quality"]}, + {"quality": dts[0]["quality"]}, + ] assert "quality" in combine_metadata(*dts4) dts5 = [ - {"quality": dts3[0]["quality"]}, - {"quality": dts3[0]["quality"]}, - ] + {"quality": dts3[0]["quality"]}, + {"quality": dts3[0]["quality"]}, + ] assert "quality" in combine_metadata(*dts5) # check with other types dts6 = [ - DataArray(arange(5), attrs=dts[0]), - DataArray(arange(5), attrs=dts[0]), - DataArray(arange(5), attrs=dts[1]), - object() - ] + DataArray(arange(5), attrs=dts[0]), + DataArray(arange(5), attrs=dts[0]), + DataArray(arange(5), attrs=dts[1]), + object() + ] assert "quality" not in combine_metadata(*dts6) def test_combine_lists_identical(self): @@ -418,20 +418,20 @@ def test_dataid(): # Check inequality default_id_keys_config = {'name': None, 'wavelength': { - 'type': WavelengthRange, + 'type': WavelengthRange, }, 'resolution': None, 'calibration': { - 'enum': [ - 'reflectance', - 'brightness_temperature', - 'radiance', - 'counts' - ] + 'enum': [ + 'reflectance', + 'brightness_temperature', + 'radiance', + 'counts' + ] }, 'modifiers': { - 'default': ModifierTuple(), - 'type': ModifierTuple, + 'default': ModifierTuple(), + 'type': ModifierTuple, }, } assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006") @@ -502,6 +502,23 @@ def test_dataid_pickle(): assert did == pickle.loads(pickle.dumps(did)) +def test_dataid_elements_picklable(): + """Test individual elements of DataID can be pickled. + + In some cases, like in the base reader classes, the elements of a DataID + are extracted and stored in a separate dictionary. This means that the + internal/fancy pickle handling of DataID does not play a part. 
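In other words, each element stored in a DataID must survive a pickle round trip on its own. A minimal illustration with WavelengthRange, one of the element types the test constructs (arguments are min, central, max, as used elsewhere in this file):

import pickle

from satpy.dataset.dataid import WavelengthRange

wr = WavelengthRange(10, 11, 12)
assert wr == pickle.loads(pickle.dumps(wr))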
+ + """ + import pickle + + from satpy.tests.utils import make_dataid + did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance') + for value in did.values(): + pickled_value = pickle.loads(pickle.dumps(value)) + assert value == pickled_value + + class TestDataQuery: """Test case for data queries.""" @@ -671,6 +688,156 @@ def test_seviri_hrv_has_priority_over_vis008(self): assert res[0].name == "HRV" +def test_frequency_double_side_band_class_method_convert(): + """Test the frequency double side band object: test the class method convert.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand + + frq_dsb = FrequencyDoubleSideBand(183, 7, 2) + + res = frq_dsb.convert(185) + assert res == 185 + + res = frq_dsb.convert({'central': 185, 'side': 7, 'bandwidth': 2}) + assert res == FrequencyDoubleSideBand(185, 7, 2) + + +def test_frequency_double_side_band_channel_str(): + """Test the frequency double side band object: test the band description.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand + + frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2) + frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, 'MHz') + + assert str(frq_dsb1) == "183 GHz (7_2 GHz)" + assert str(frq_dsb2) == "183000 MHz (7000_2000 MHz)" + + +def test_frequency_double_side_band_channel_equality(): + """Test the frequency double side band object: check if two bands are 'equal'.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand + + frq_dsb = FrequencyDoubleSideBand(183, 7, 2) + assert frq_dsb is not None + assert 183 != frq_dsb + assert 190 == frq_dsb + assert 176 == frq_dsb + assert 175.5 == frq_dsb + + assert frq_dsb != FrequencyDoubleSideBand(183, 6.5, 3) + + frq_dsb = None + assert FrequencyDoubleSideBand(183, 7, 2) != frq_dsb + + assert frq_dsb < FrequencyDoubleSideBand(183, 7, 2) + assert FrequencyDoubleSideBand(182, 7, 2) < FrequencyDoubleSideBand(183, 7, 2) + assert FrequencyDoubleSideBand(184, 7, 2) > FrequencyDoubleSideBand(183, 7, 2) + + +def test_frequency_double_side_band_channel_distances(): + """Test the frequency double side band object: get the distance between two bands.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand + + frq_dsb = FrequencyDoubleSideBand(183, 7, 2) + mydist = frq_dsb.distance(175.5) + assert mydist == 0.5 + + mydist = frq_dsb.distance(190.5) + assert mydist == 0.5 + + np.testing.assert_almost_equal(frq_dsb.distance(175.6), 0.4) + np.testing.assert_almost_equal(frq_dsb.distance(190.1), 0.1) + + mydist = frq_dsb.distance(185) + assert mydist == np.inf + + mydist = frq_dsb.distance((183, 7.0, 2)) + assert mydist == 0 + + mydist = frq_dsb.distance((183, 7.0, 1)) + assert mydist == 0 + + mydist = frq_dsb.distance(FrequencyDoubleSideBand(183, 7.0, 2)) + assert mydist == 0 + + +def test_frequency_double_side_band_channel_containment(): + """Test the frequency double side band object: check if one band contains another.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand + + frq_dsb = FrequencyDoubleSideBand(183, 7, 2) + + assert 175.5 in frq_dsb + assert frq_dsb in FrequencyDoubleSideBand(183, 6.5, 3) + assert frq_dsb not in FrequencyDoubleSideBand(183, 4, 2) + + with pytest.raises(NotImplementedError): + assert frq_dsb in FrequencyDoubleSideBand(183, 6.5, 3, 'MHz') + + frq_dsb = None + assert (frq_dsb in FrequencyDoubleSideBand(183, 3, 2)) is False + + assert '183' not in FrequencyDoubleSideBand(183, 3, 2) + + +def 
test_frequency_range_class_method_convert(): + """Test the frequency range object: test the class method convert.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange + + frq_dsb = FrequencyRange(89, 2) + + res = frq_dsb.convert(89) + assert res == 89 + + res = frq_dsb.convert({'central': 89, 'bandwidth': 2}) + assert res == FrequencyRange(89, 2) + + +def test_frequency_range_channel_equality(): + """Test the frequency range object: check if two bands are 'equal'.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange + + frqr = FrequencyRange(2, 1) + assert frqr is not None + assert 1.7 == frqr + assert 1.2 != frqr + assert frqr == (2, 1) + + assert frqr == (2, 1, 'GHz') + + +def test_frequency_range_channel_containment(): + """Test the frequency range object: channel containment.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange + + frqr = FrequencyRange(2, 1) + assert 1.7 in frqr + assert 2.8 not in frqr + + with pytest.raises(NotImplementedError): + assert frqr in FrequencyRange(89, 2, 'MHz') + + frqr = None + assert (frqr in FrequencyRange(89, 2)) is False + + assert '89' not in FrequencyRange(89, 2) + + +def test_frequency_range_channel_distances(): + """Test the frequency range object: derive distances between bands.""" + from satpy.readers.aapp_mhs_amsub_l1c import FrequencyRange + + frqr = FrequencyRange(190.0, 2) + + mydist = frqr.distance(FrequencyRange(190, 2)) + assert mydist == 0 + mydist = frqr.distance(FrequencyRange(189.5, 2)) + assert mydist == np.inf + mydist = frqr.distance(189.5) + assert mydist == 0.5 + mydist = frqr.distance(188.0) + assert mydist == np.inf + + def test_wavelength_range(): """Test the wavelength range object.""" from satpy.dataset.dataid import WavelengthRange @@ -700,6 +867,9 @@ def test_wavelength_range(): assert str(wr) == "2 µm (1-3 µm)" assert str(wr2) == "2 nm (1-3 nm)" + wr = WavelengthRange(10.5, 11.5, 12.5) + np.testing.assert_almost_equal(wr.distance(11.1), 0.4) + def test_wavelength_range_cf_roundtrip(): """Test the wavelength range object roundtrip to cf.""" diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py index fc60d4d4e5..c4b76a919e 100644 --- a/satpy/tests/test_file_handlers.py +++ b/satpy/tests/test_file_handlers.py @@ -18,6 +18,7 @@ """test file handler baseclass.""" import unittest +from datetime import datetime, timedelta from unittest import mock import numpy as np @@ -157,6 +158,28 @@ def test_combine_orbital_parameters(self): # Empty self.fh.combine_info([{}]) + def test_combine_time_parameters(self): + """Combine times in 'time_parameters'.""" + time_params1 = { + 'nominal_start_time': datetime(2020, 1, 1, 12, 0, 0), + 'nominal_end_time': datetime(2020, 1, 1, 12, 2, 30), + 'observation_start_time': datetime(2020, 1, 1, 12, 0, 2, 23821), + 'observation_end_time': datetime(2020, 1, 1, 12, 2, 23, 12348), + } + time_params2 = {} + time_shift = timedelta(seconds=1.5) + for key, value in time_params1.items(): + time_params2[key] = value + time_shift + res = self.fh.combine_info([ + {'time_parameters': time_params1}, + {'time_parameters': time_params2} + ]) + res_time_params = res['time_parameters'] + assert res_time_params['nominal_start_time'] == datetime(2020, 1, 1, 12, 0, 0) + assert res_time_params['nominal_end_time'] == datetime(2020, 1, 1, 12, 2, 31, 500000) + assert res_time_params['observation_start_time'] == datetime(2020, 1, 1, 12, 0, 2, 23821) + assert res_time_params['observation_end_time'] == datetime(2020, 1, 1, 12, 2, 24, 512348) + def
test_file_is_kept_intact(self): """Test that the file object passed (string, path, or other) is kept intact.""" open_file = mock.MagicMock() diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 244e13cfe1..d8598a5b05 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -16,96 +16,152 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . """Tests for modifiers in modifiers/__init__.py.""" - +import contextlib import unittest +import warnings +from copy import deepcopy from datetime import datetime, timedelta from glob import glob +from typing import Optional, Union from unittest import mock import dask.array as da import numpy as np import pytest import xarray as xr -from pyresample.geometry import AreaDefinition +from pyresample.geometry import AreaDefinition, StackedAreaDefinition +from pytest_lazyfixture import lazy_fixture import satpy +from satpy.utils import PerformanceWarning + + +def _sunz_area_def(): + """Get fake area for testing sunz generation.""" + area = AreaDefinition('test', 'test', 'test', + {'proj': 'merc'}, 2, 2, + (-2000, -2000, 2000, 2000)) + return area + + +def _sunz_bigger_area_def(): + """Get area that is twice the size of 'sunz_area_def'.""" + bigger_area = AreaDefinition('test', 'test', 'test', + {'proj': 'merc'}, 4, 4, + (-2000, -2000, 2000, 2000)) + return bigger_area + + +def _sunz_stacked_area_def(): + """Get fake stacked area for testing sunz generation.""" + area1 = AreaDefinition('test', 'test', 'test', + {'proj': 'merc'}, 2, 1, + (-2000, 0, 2000, 2000)) + area2 = AreaDefinition('test', 'test', 'test', + {'proj': 'merc'}, 2, 1, + (-2000, -2000, 2000, 0)) + return StackedAreaDefinition(area1, area2) + + +def _shared_sunz_attrs(area_def): + attrs = {'area': area_def, + 'start_time': datetime(2018, 1, 1, 18), + 'modifiers': tuple(), + 'name': 'test_vis'} + return attrs + + +def _get_ds1(attrs): + ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), + attrs=attrs, dims=('y', 'x'), + coords={'y': [0, 1], 'x': [0, 1]}) + return ds1 + + +@pytest.fixture(scope="session") +def sunz_ds1(): + """Generate fake dataset for sunz tests.""" + attrs = _shared_sunz_attrs(_sunz_area_def()) + return _get_ds1(attrs) + + +@pytest.fixture(scope="session") +def sunz_ds1_stacked(): + """Generate fake dataset for sunz tests.""" + attrs = _shared_sunz_attrs(_sunz_stacked_area_def()) + return _get_ds1(attrs) + + +@pytest.fixture(scope="session") +def sunz_ds2(): + """Generate larger fake dataset for sunz tests.""" + attrs = _shared_sunz_attrs(_sunz_bigger_area_def()) + ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), + attrs=attrs, dims=('y', 'x'), + coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]}) + return ds2 + + +@pytest.fixture(scope="session") +def sunz_sza(): + """Generate fake solar zenith angle data array for testing.""" + sza = xr.DataArray( + np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], + chunks=2))), + attrs={'area': _sunz_area_def()}, + dims=('y', 'x'), + coords={'y': [0, 1], 'x': [0, 1]}, + ) + return sza -class TestSunZenithCorrector(unittest.TestCase): +class TestSunZenithCorrector: """Test case for the zenith corrector.""" - def setUp(self): - """Create test data.""" - from pyresample.geometry import AreaDefinition - area = AreaDefinition('test', 'test', 'test', - {'proj': 'merc'}, 2, 2, - (-2000, -2000, 2000, 2000)) - bigger_area = AreaDefinition('test', 'test', 'test', - 
{'proj': 'merc'}, 4, 4, - (-2000, -2000, 2000, 2000)) - attrs = {'area': area, - 'start_time': datetime(2018, 1, 1, 18), - 'modifiers': tuple(), - 'name': 'test_vis'} - ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}) - self.ds1 = ds1 - ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64), - attrs=attrs, dims=('y', 'x'), - coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]}) - ds2.attrs['area'] = bigger_area - self.ds2 = ds2 - self.sza = xr.DataArray( - np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]], - chunks=2))), - attrs={'area': area}, - dims=('y', 'x'), - coords={'y': [0, 1], 'x': [0, 1]}, - ) - - def test_basic_default_not_provided(self): + def test_basic_default_not_provided(self, sunz_ds1): """Test default limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((self.ds1,), test_attr='test') + res = comp((sunz_ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - self.assertIn('y', res.coords) - self.assertIn('x', res.coords) - ds1 = self.ds1.copy().drop_vars(('y', 'x')) + assert 'y' in res.coords + assert 'x' in res.coords + ds1 = sunz_ds1.copy().drop_vars(('y', 'x')) res = comp((ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - self.assertNotIn('y', res.coords) - self.assertNotIn('x', res.coords) + assert 'y' not in res.coords + assert 'x' not in res.coords - def test_basic_lims_not_provided(self): + def test_basic_lims_not_provided(self, sunz_ds1): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((self.ds1,), test_attr='test') + res = comp((sunz_ds1,), test_attr='test') np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) - def test_basic_default_provided(self): + @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) + def test_basic_default_provided(self, data_arr, sunz_sza): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple()) - res = comp((self.ds1, self.sza), test_attr='test') + res = comp((data_arr, sunz_sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - def test_basic_lims_provided(self): + @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) + def test_basic_lims_provided(self, data_arr, sunz_sza): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) - res = comp((self.ds1, self.sza), test_attr='test') + res = comp((data_arr, sunz_sza), test_attr='test') np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) - def test_imcompatible_areas(self): + def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" from satpy.composites import IncompatibleAreas from satpy.modifiers.geometry import 
SunZenithCorrector comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90) with pytest.raises(IncompatibleAreas): - comp((self.ds2, self.sza), test_attr='test') + comp((sunz_ds2, sunz_sza), test_attr='test') class TestNIRReflectance(unittest.TestCase): @@ -380,29 +436,74 @@ def test_call(self): res.compute() -def _get_angle_test_data(): - orb_params = { - "satellite_nominal_altitude": 12345678, - "satellite_nominal_longitude": 10.0, - "satellite_nominal_latitude": 0.0, - } +def _angle_cache_area_def(): area = AreaDefinition( "test", "", "", {"proj": "merc"}, 5, 5, (-2500, -2500, 2500, 2500), ) + return area + + +def _angle_cache_stacked_area_def(): + area1 = AreaDefinition( + "test", "", "", + {"proj": "merc"}, + 5, 2, + (2500, 500, 7500, 2500), + ) + area2 = AreaDefinition( + "test", "", "", + {"proj": "merc"}, + 5, 3, + (2500, -2500, 7500, 500), + ) + return StackedAreaDefinition(area1, area2) + + +def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDefinition]] = None, + chunks: Optional[Union[int, tuple]] = 2, + shape: tuple = (5, 5), + dims: tuple = None, + ) -> xr.DataArray: + if area_def is None: + area_def = _angle_cache_area_def() + orb_params = { + "satellite_nominal_altitude": 12345678, + "satellite_nominal_longitude": 10.0, + "satellite_nominal_latitude": 0.0, + } stime = datetime(2020, 1, 1, 12, 0, 0) - data = da.zeros((5, 5), chunks=2) + data = da.zeros(shape, chunks=chunks) vis = xr.DataArray(data, + dims=dims, attrs={ - 'area': area, + 'area': area_def, 'start_time': stime, 'orbital_parameters': orb_params, }) return vis +def _get_stacked_angle_test_data(): + return _get_angle_test_data(area_def=_angle_cache_stacked_area_def(), + chunks=(5, (2, 2, 1))) + + +def _get_angle_test_data_rgb(): + return _get_angle_test_data(shape=(5, 5, 3), chunks=((2, 2, 1), (2, 2, 1), (1, 1, 1)), + dims=("y", "x", "bands")) + + +def _get_angle_test_data_rgb_nodims(): + return _get_angle_test_data(shape=(3, 5, 5), chunks=((1, 1, 1), (2, 2, 1), (2, 2, 1))) + + +def _get_angle_test_data_odd_chunks(): + return _get_angle_test_data(chunks=((2, 1, 2), (1, 1, 2, 1))) + + def _similar_sat_pos_datetime(orig_data, lon_offset=0.04): # change data slightly new_data = orig_data.copy() @@ -416,13 +517,43 @@ def _diff_sat_pos_datetime(orig_data): return _similar_sat_pos_datetime(orig_data, lon_offset=0.05) +def _glob_reversed(pat): + """Behave like glob but force results to be in the wrong order.""" + return sorted(glob(pat), reverse=True) + + +@contextlib.contextmanager +def _mock_glob_if(mock_glob): + if mock_glob: + with mock.patch("satpy.modifiers.angles.glob", _glob_reversed): + yield + else: + yield + + +def _assert_allclose_if(expect_equal, arr1, arr2): + if not expect_equal: + pytest.raises(AssertionError, np.testing.assert_allclose, arr1, arr2) + else: + np.testing.assert_allclose(arr1, arr2) + + class TestAngleGeneration: """Test the angle generation utility functions.""" - def test_get_angles(self): + @pytest.mark.parametrize( + ("input_func", "exp_calls"), + [ + (_get_angle_test_data, 9), + (_get_stacked_angle_test_data, 3), + (_get_angle_test_data_rgb, 9), + (_get_angle_test_data_rgb_nodims, 9), + ], + ) + def test_get_angles(self, input_func, exp_calls): """Test sun and satellite angle calculation.""" from satpy.modifiers.angles import get_angles - data = _get_angle_test_data() + data = input_func() from pyorbital.orbital import get_observer_look with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol: @@ 
-431,12 +562,56 @@ da.compute(angles) # get_observer_look should have been called once per array chunk - assert gol.call_count == data.data.blocks.size + assert gol.call_count == exp_calls # Check arguments of get_observer_look() call, especially the altitude # unit conversion from meters to kilometers args = gol.call_args[0] assert args[:4] == (10.0, 0.0, 12345.678, data.attrs["start_time"]) + + @pytest.mark.parametrize("forced_preference", ["actual", "nadir"]) + def test_get_angles_satpos_preference(self, forced_preference): + """Test that the preferred satellite position is used for generating sensor angles.""" + from satpy.modifiers.angles import get_angles + + input_data1 = _get_angle_test_data() + # add additional satellite position metadata + input_data1.attrs["orbital_parameters"]["nadir_longitude"] = 9.0 + input_data1.attrs["orbital_parameters"]["nadir_latitude"] = 0.01 + input_data1.attrs["orbital_parameters"]["satellite_actual_longitude"] = 9.5 + input_data1.attrs["orbital_parameters"]["satellite_actual_latitude"] = 0.005 + input_data1.attrs["orbital_parameters"]["satellite_actual_altitude"] = 12345679 + input_data2 = input_data1.copy(deep=True) + input_data2.attrs = deepcopy(input_data1.attrs) + input_data2.attrs["orbital_parameters"]["nadir_longitude"] = 9.1 + input_data2.attrs["orbital_parameters"]["nadir_latitude"] = 0.02 + input_data2.attrs["orbital_parameters"]["satellite_actual_longitude"] = 9.5 + input_data2.attrs["orbital_parameters"]["satellite_actual_latitude"] = 0.005 + input_data2.attrs["orbital_parameters"]["satellite_actual_altitude"] = 12345679 + + from pyorbital.orbital import get_observer_look + with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \ + satpy.config.set(sensor_angles_position_preference=forced_preference): + angles1 = get_angles(input_data1) + da.compute(angles1) + angles2 = get_angles(input_data2) + da.compute(angles2) + + # get_observer_look should have been called once per array chunk + assert gol.call_count == input_data1.data.blocks.size * 2 + if forced_preference == "actual": + exp_call = mock.call(9.5, 0.005, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0) + all_same_calls = [exp_call] * gol.call_count + gol.assert_has_calls(all_same_calls) + # the dask arrays should have the same name to prove they are the same computation + for angle_arr1, angle_arr2 in zip(angles1, angles2): + assert angle_arr1.data.name == angle_arr2.data.name + else: + # nadir 1 + gol.assert_any_call(9.0, 0.01, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0) + # nadir 2 + gol.assert_any_call(9.1, 0.02, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0) + + @pytest.mark.parametrize("force_bad_glob", [False, True]) @pytest.mark.parametrize( ("input2_func", "exp_equal_sun", "exp_num_zarr"), [ @@ -445,54 +620,110 @@ def test_get_angles(self): (_diff_sat_pos_datetime, False, 6), ] ) - def test_cache_get_angles(self, input2_func, exp_equal_sun, exp_num_zarr, tmpdir): + @pytest.mark.parametrize( + ("input_func", "num_normalized_chunks", "exp_zarr_chunks"), + [ + (_get_angle_test_data, 9, ((2, 2, 1), (2, 2, 1))), + (_get_stacked_angle_test_data, 3, ((5,), (2, 2, 1))), + (_get_angle_test_data_odd_chunks, 9, ((2, 1, 2), (1, 1, 2, 1))), + (_get_angle_test_data_rgb, 9, ((2, 2, 1), (2, 2, 1))), + (_get_angle_test_data_rgb_nodims, 9, ((2, 2, 1), (2, 2, 1))), + ]) + def test_cache_get_angles( + self, + input_func, num_normalized_chunks, exp_zarr_chunks, input2_func,
exp_equal_sun, exp_num_zarr, + force_bad_glob, tmp_path): """Test get_angles when caching is enabled.""" - from satpy.modifiers.angles import ( - STATIC_EARTH_INERTIAL_DATETIME, - _get_sensor_angles_from_sat_pos, - _get_valid_lonlats, - get_angles, - ) + from satpy.modifiers.angles import STATIC_EARTH_INERTIAL_DATETIME, get_angles # Patch methods - data = _get_angle_test_data() + data = input_func() additional_cache = exp_num_zarr > 4 # Compute angles from pyorbital.orbital import get_observer_look with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \ - satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=str(tmpdir)): + satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=str(tmp_path)), \ + warnings.catch_warnings(record=True) as caught_warnings: res = get_angles(data) - assert all(isinstance(x, xr.DataArray) for x in res) + self._check_cached_result(res, exp_zarr_chunks) # call again, should be cached new_data = input2_func(data) - res2 = get_angles(new_data) - assert all(isinstance(x, xr.DataArray) for x in res2) - res, res2 = da.compute(res, res2) - for r1, r2 in zip(res[:2], res2[:2]): - if additional_cache: - pytest.raises(AssertionError, np.testing.assert_allclose, r1, r2) - else: - np.testing.assert_allclose(r1, r2) - - for r1, r2 in zip(res[2:], res2[2:]): - if exp_equal_sun: - np.testing.assert_allclose(r1, r2) - else: - pytest.raises(AssertionError, np.testing.assert_allclose, r1, r2) - - zarr_dirs = glob(str(tmpdir / "*.zarr")) - assert len(zarr_dirs) == exp_num_zarr # two for lon/lat, one for sata, one for satz - - _get_sensor_angles_from_sat_pos.cache_clear() - _get_valid_lonlats.cache_clear() - zarr_dirs = glob(str(tmpdir / "*.zarr")) - assert len(zarr_dirs) == 0 - - assert gol.call_count == data.data.blocks.size * (int(additional_cache) + 1) + with _mock_glob_if(force_bad_glob): + res2 = get_angles(new_data) + self._check_cached_result(res2, exp_zarr_chunks) + + res_numpy, res2_numpy = da.compute(res, res2) + for r1, r2 in zip(res_numpy[:2], res2_numpy[:2]): + _assert_allclose_if(not additional_cache, r1, r2) + for r1, r2 in zip(res_numpy[2:], res2_numpy[2:]): + _assert_allclose_if(exp_equal_sun, r1, r2) + + self._check_cache_and_clear(tmp_path, exp_num_zarr) + + if "odd_chunks" in input_func.__name__: + assert any(w.category is PerformanceWarning for w in caught_warnings) + else: + assert not any(w.category is PerformanceWarning for w in caught_warnings) + assert gol.call_count == num_normalized_chunks * (int(additional_cache) + 1) args = gol.call_args_list[0][0] assert args[:4] == (10.0, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME) exp_sat_lon = 10.1 if additional_cache else 10.0 args = gol.call_args_list[-1][0] assert args[:4] == (exp_sat_lon, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME) + + @staticmethod + def _check_cached_result(results, exp_zarr_chunks): + assert all(isinstance(x, xr.DataArray) for x in results) + # output chunks should be consistent + for angle_data_arr in results: + assert angle_data_arr.chunks == exp_zarr_chunks + + @staticmethod + def _check_cache_and_clear(tmp_path, exp_num_zarr): + from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, _get_valid_lonlats + zarr_dirs = glob(str(tmp_path / "*.zarr")) + assert len(zarr_dirs) == exp_num_zarr # two for lon/lat, one for sata, one for satz + + _get_valid_lonlats.cache_clear() + _get_sensor_angles_from_sat_pos.cache_clear() + zarr_dirs = glob(str(tmp_path / "*.zarr")) + assert len(zarr_dirs) == 0 + + 
def test_cached_no_chunks_fails(self, tmp_path): + """Test that trying to pass non-dask arrays and no chunks fails.""" + from satpy.modifiers.angles import _sanitize_args_with_chunks, cache_to_zarr_if + + @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) + def _fake_func(data, tuple_arg, chunks): + return da.from_array(data) + + data = list(range(5)) + with pytest.raises(RuntimeError), \ + satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): + _fake_func(data, (1, 2, 3), 5) + + def test_cached_result_numpy_fails(self, tmp_path): + """Test that trying to cache with non-dask arrays fails.""" + from satpy.modifiers.angles import _sanitize_args_with_chunks, cache_to_zarr_if + + @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks) + def _fake_func(shape, chunks): + return np.zeros(shape) + + with pytest.raises(ValueError), \ + satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)): + _fake_func((5, 5), ((5,), (5,))) + + def test_no_cache_dir_fails(self, tmp_path): + """Test that 'cache_dir' not being set fails.""" + from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles + data = _get_angle_test_data() + with pytest.raises(RuntimeError), \ + satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=None): + get_angles(data) + with pytest.raises(RuntimeError), \ + satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=None): + _get_sensor_angles_from_sat_pos.cache_clear() diff --git a/satpy/tests/test_multiscene.py b/satpy/tests/test_multiscene.py index 0efe727f25..1d14d43298 100644 --- a/satpy/tests/test_multiscene.py +++ b/satpy/tests/test_multiscene.py @@ -25,7 +25,9 @@ from unittest import mock import pytest +import xarray as xr +from satpy import DataQuery from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange DEFAULT_SHAPE = (5, 10) @@ -211,80 +213,80 @@ def test_from_files(self): time_threshold=30) assert len(mscn.scenes) == 12 - def test_group(self): - """Test group.""" - from satpy import MultiScene, Scene - - ds1 = _create_test_dataset(name='ds1') - ds2 = _create_test_dataset(name='ds2') - ds3 = _create_test_dataset(name='ds3') - ds4 = _create_test_dataset(name='ds4') - scene1 = Scene() - scene1['ds1'] = ds1 - scene1['ds2'] = ds2 - scene2 = Scene() - scene2['ds3'] = ds3 - scene2['ds4'] = ds4 - - multi_scene = MultiScene([scene1, scene2]) - groups = {make_dataid(name='odd', wavelength=(1, 2, 3)): ['ds1', 'ds3'], - make_dataid(name='even', wavelength=(2, 3, 4)): ['ds2', 'ds4']} - multi_scene.group(groups) - - self.assertSetEqual(multi_scene.shared_dataset_ids, set(groups.keys())) - def test_add_group_aliases(self): - """Test adding group aliases.""" - import types - - import numpy as np - import xarray as xr +class TestMultiSceneGrouping: + """Test dataset grouping in MultiScene.""" + @pytest.fixture + def scene1(self): + """Create first test scene.""" from satpy import Scene - from satpy.multiscene import add_group_aliases + scene = Scene() + dsid1 = make_dataid( + name="ds1", + resolution=123, + wavelength=(1, 2, 3), + polarization="H" + ) + scene[dsid1] = _create_test_dataset(name='ds1') + dsid2 = make_dataid( + name="ds2", + resolution=456, + wavelength=(4, 5, 6), + polarization="V" + ) + scene[dsid2] = _create_test_dataset(name='ds2') + return scene - # Define test scenes - ds_id1 = make_dataid(name='ds1', wavelength=(10.7, 10.8, 10.9)) - ds_id2 = make_dataid(name='ds2', wavelength=(1.9, 2.0, 2.1)) - ds_id3 = make_dataid(name='ds3', 
wavelength=(10.8, 10.9, 11.0)) - ds_id31 = make_dataid(name='ds31', polarization='H') + @pytest.fixture + def scene2(self): + """Create second test scene.""" + from satpy import Scene + scene = Scene() + dsid1 = make_dataid( + name="ds3", + resolution=123.1, + wavelength=(1.1, 2.1, 3.1), + polarization="H" + ) + scene[dsid1] = _create_test_dataset(name='ds3') + dsid2 = make_dataid( + name="ds4", + resolution=456.1, + wavelength=(4.1, 5.1, 6.1), + polarization="V" + ) + scene[dsid2] = _create_test_dataset(name='ds4') + return scene - scene1 = Scene() - scene1[ds_id1] = xr.DataArray([1]) - scene2 = Scene() - scene2[ds_id2] = xr.DataArray([2]) - scene3 = Scene() - scene3[ds_id3] = xr.DataArray([3]) - scene3[ds_id31] = xr.DataArray([4]) - scenes = [scene1, scene2, scene3] - - # Define groups - g1 = make_dataid(name='g1', wavelength=(10, 11, 12)) - g2 = make_dataid(name='g2', wavelength=(1, 2, 3), polarization='V') - groups = {g1: ['ds1', 'ds3'], g2: ['ds2']} - - # Test adding aliases - with_aliases = add_group_aliases(iter(scenes), groups) - self.assertIsInstance(with_aliases, types.GeneratorType) - with_aliases = list(with_aliases) - self.assertSetEqual(set(with_aliases[0].keys()), {g1, ds_id1}) - self.assertSetEqual(set(with_aliases[1].keys()), {g2, ds_id2}) - self.assertSetEqual(set(with_aliases[2].keys()), {g1, ds_id3, ds_id31}) - - np.testing.assert_array_equal(with_aliases[0]['g1'].values, [1]) - np.testing.assert_array_equal(with_aliases[0]['ds1'].values, [1]) - np.testing.assert_array_equal(with_aliases[1]['g2'].values, [2]) - np.testing.assert_array_equal(with_aliases[1]['ds2'].values, [2]) - np.testing.assert_array_equal(with_aliases[2]['g1'].values, [3]) - np.testing.assert_array_equal(with_aliases[2]['ds3'].values, [3]) - np.testing.assert_array_equal(with_aliases[2]['ds31'].values, [4]) - - # Make sure that modifying the result doesn't modify the original - self.assertNotIn(g1, scene1) - - # Adding an alias for multiple datasets in one scene should fail - gen = add_group_aliases([scene3], {g1: ['ds3', 'ds31']}) - self.assertRaises(ValueError, list, gen) + @pytest.fixture + def multi_scene(self, scene1, scene2): + """Create small multi scene for testing.""" + from satpy import MultiScene + return MultiScene([scene1, scene2]) + + @pytest.fixture + def groups(self): + """Get group definitions for the MultiScene.""" + return { + DataQuery(name='odd'): ['ds1', 'ds3'], + DataQuery(name='even'): ['ds2', 'ds4'] + } + + def test_multi_scene_grouping(self, multi_scene, groups, scene1): + """Test grouping a MultiScene.""" + multi_scene.group(groups) + shared_ids_exp = {make_dataid(name="odd"), make_dataid(name="even")} + assert multi_scene.shared_dataset_ids == shared_ids_exp + assert DataQuery(name='odd') not in scene1 + xr.testing.assert_allclose(multi_scene.scenes[0]["ds1"], scene1["ds1"]) + + def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, multi_scene): + """Test that multiple datasets from the same scene in one group fails.""" + groups = {DataQuery(name='mygroup'): ['ds1', 'ds2']} + multi_scene.group(groups) + with pytest.raises(ValueError): + next(multi_scene.scenes) class TestMultiSceneSave(unittest.TestCase): diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py index 3e5b0035eb..42301cedf8 100644 --- a/satpy/tests/test_resample.py +++ b/satpy/tests/test_resample.py @@ -371,22 +371,22 @@ def test_expand_reduce(self): from satpy.resample import NativeResampler d_arr = da.zeros((6, 20), chunks=4) - new_data = 
NativeResampler.expand_reduce(d_arr, {0: 2., 1: 2.}) + new_data = NativeResampler._expand_reduce(d_arr, {0: 2., 1: 2.}) self.assertEqual(new_data.shape, (12, 40)) - new_data = NativeResampler.expand_reduce(d_arr, {0: .5, 1: .5}) + new_data = NativeResampler._expand_reduce(d_arr, {0: .5, 1: .5}) self.assertEqual(new_data.shape, (3, 10)) - self.assertRaises(ValueError, NativeResampler.expand_reduce, + self.assertRaises(ValueError, NativeResampler._expand_reduce, d_arr, {0: 1. / 3, 1: 1.}) - new_data = NativeResampler.expand_reduce(d_arr, {0: 1., 1: 1.}) + new_data = NativeResampler._expand_reduce(d_arr, {0: 1., 1: 1.}) self.assertEqual(new_data.shape, (6, 20)) self.assertIs(new_data, d_arr) - self.assertRaises(ValueError, NativeResampler.expand_reduce, + self.assertRaises(ValueError, NativeResampler._expand_reduce, d_arr, {0: 0.333323423, 1: 1.}) - self.assertRaises(ValueError, NativeResampler.expand_reduce, + self.assertRaises(ValueError, NativeResampler._expand_reduce, d_arr, {0: 1.333323423, 1: 1.}) n_arr = np.zeros((6, 20)) - new_data = NativeResampler.expand_reduce(n_arr, {0: 2., 1: 1.0}) + new_data = NativeResampler._expand_reduce(n_arr, {0: 2., 1: 1.0}) self.assertTrue(np.all(new_data.compute()[::2, :] == n_arr)) def test_expand_dims(self): diff --git a/satpy/tests/test_scene.py b/satpy/tests/test_scene.py index 793e3a20e7..bf2ffd948c 100644 --- a/satpy/tests/test_scene.py +++ b/satpy/tests/test_scene.py @@ -98,6 +98,41 @@ def test_init_preserve_reader_kwargs(self): assert scene.start_time == FAKE_FILEHANDLER_START assert scene.end_time == FAKE_FILEHANDLER_END + @pytest.mark.parametrize( + ("reader", "filenames", "exp_sensors"), + [ + ("fake1", ["fake1_1.txt"], {"fake_sensor"}), + (None, {"fake1": ["fake1_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, {"fake_sensor", "fake_sensor2"}), + ] + ) + def test_sensor_names_readers(self, reader, filenames, exp_sensors): + """Test that Scene sensor_names handles different cases properly.""" + scene = Scene(reader=reader, filenames=filenames) + assert scene.start_time == FAKE_FILEHANDLER_START + assert scene.end_time == FAKE_FILEHANDLER_END + assert scene.sensor_names == exp_sensors + + @pytest.mark.parametrize( + ("include_reader", "added_sensor", "exp_sensors"), + [ + (False, "my_sensor", {"my_sensor"}), + (True, "my_sensor", {"my_sensor", "fake_sensor"}), + (False, {"my_sensor"}, {"my_sensor"}), + (True, {"my_sensor"}, {"my_sensor", "fake_sensor"}), + (False, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2"}), + (True, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2", "fake_sensor"}), + ] + ) + def test_sensor_names_added_datasets(self, include_reader, added_sensor, exp_sensors): + """Test that Scene sensor_names handles contained sensors properly.""" + if include_reader: + scene = Scene(reader="fake1", filenames=["fake1_1.txt"]) + else: + scene = Scene() + + scene["my_ds"] = xr.DataArray([], attrs={"sensor": added_sensor}) + assert scene.sensor_names == exp_sensors + def test_init_alone(self): """Test simple initialization.""" scn = Scene() @@ -597,68 +632,177 @@ def test_available_dataset_names_no_readers(self): name_list = scene.available_dataset_names(composites=True) assert name_list == [] + def test_storage_options_from_reader_kwargs_no_options(self): + """Test getting storage options from reader kwargs. -class TestFinestCoarsestArea: - """Test the Scene logic for finding the finest and coarsest area.""" + Case where there are no options given. 
+ """ + filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] + with mock.patch('satpy.scene.load_readers'): + with mock.patch('fsspec.open_files') as open_files: + Scene(filenames=filenames) + open_files.assert_called_once_with(filenames) - def setup_method(self): - """Set common variables.""" - from pyresample.geometry import AreaDefinition - from pyresample.utils import proj4_str_to_dict - self.scene = Scene() - self.scene["1"] = xr.DataArray(np.arange(10).reshape((2, 5)), - attrs={'wavelength': (0.1, 0.2, 0.3)}) - self.ds1 = self.scene["1"] + def test_storage_options_from_reader_kwargs_single_dict_no_options(self): + """Test getting storage options from reader kwargs for remote files. - self.scene["2"] = xr.DataArray(np.arange(40).reshape((4, 10)), - attrs={'wavelength': (0.4, 0.5, 0.6)}) - self.ds2 = self.scene["2"] + Case where a single dict is given for all readers without storage options. + """ + filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] + reader_kwargs = {'reader_opt': 'foo'} + with mock.patch('satpy.scene.load_readers'): + with mock.patch('fsspec.open_files') as open_files: + Scene(filenames=filenames, reader_kwargs=reader_kwargs) + open_files.assert_called_once_with(filenames) - self.scene["3"] = xr.DataArray(np.arange(40).reshape((4, 10)), - attrs={'wavelength': (0.7, 0.8, 0.9)}) - self.ds3 = self.scene["3"] + def test_storage_options_from_reader_kwargs_single_dict(self): + """Test getting storage options from reader kwargs. + + Case where a single dict is given for all readers with some common storage options. + """ + filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"] + reader_kwargs = {'reader_opt': 'foo'} + expected_reader_kwargs = reader_kwargs.copy() + storage_options = {'option1': '1'} + reader_kwargs['storage_options'] = storage_options + with mock.patch('satpy.scene.load_readers') as load_readers: + with mock.patch('fsspec.open_files') as open_files: + Scene(filenames=filenames, reader_kwargs=reader_kwargs) + call_ = load_readers.mock_calls[0] + assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + open_files.assert_called_once_with(filenames, **storage_options) + + def test_storage_options_from_reader_kwargs_per_reader(self): + """Test getting storage options from reader kwargs. + + Case where each reader have their own storage options. 
+ """ + from copy import deepcopy + + filenames = { + "reader1": ["s3://data-bucket/file1"], + "reader2": ["s3://data-bucket/file2"], + "reader3": ["s3://data-bucket/file3"], + } + storage_options_1 = {'option1': '1'} + storage_options_2 = {'option2': '2'} + storage_options_3 = {'option3': '3'} + reader_kwargs = { + "reader1": {'reader_opt_1': 'foo'}, + "reader2": {'reader_opt_2': 'bar'}, + "reader3": {'reader_opt_3': 'baz'}, + } + expected_reader_kwargs = deepcopy(reader_kwargs) + reader_kwargs['reader1']['storage_options'] = storage_options_1 + reader_kwargs['reader2']['storage_options'] = storage_options_2 + reader_kwargs['reader3']['storage_options'] = storage_options_3 + + with mock.patch('satpy.scene.load_readers') as load_readers: + with mock.patch('fsspec.open_files') as open_files: + Scene(filenames=filenames, reader_kwargs=reader_kwargs) + call_ = load_readers.mock_calls[0] + assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs + assert mock.call(filenames["reader1"], **storage_options_1) in open_files.mock_calls + assert mock.call(filenames["reader2"], **storage_options_2) in open_files.mock_calls + assert mock.call(filenames["reader3"], **storage_options_3) in open_files.mock_calls + + +def _create_coarest_finest_data_array(shape, area_def, attrs=None): + data_arr = xr.DataArray( + da.arange(shape[0] * shape[1]).reshape(shape), + attrs={ + 'area': area_def, + }) + if attrs: + data_arr.attrs.update(attrs) + return data_arr + + +def _create_coarsest_finest_area_def(shape, extents): + from pyresample import AreaDefinition + proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs' + area_def = AreaDefinition( + 'test', + 'test', + 'test', + proj_str, + shape[1], + shape[0], + extents, + ) + return area_def + + +def _create_coarsest_finest_swath_def(shape, extents, name_suffix): + from pyresample import SwathDefinition + lons_arr = da.repeat(da.linspace(extents[0], extents[2], shape[1], dtype=np.float32)[None, :], shape[0], axis=0) + lats_arr = da.repeat(da.linspace(extents[1], extents[3], shape[0], dtype=np.float32)[:, None], shape[1], axis=1) + lons_data_arr = xr.DataArray(lons_arr, attrs={"name": f"longitude{name_suffix}"}) + lats_data_arr = xr.DataArray(lats_arr, attrs={"name": f"latitude1{name_suffix}"}) + return SwathDefinition(lons_data_arr, lats_data_arr) - proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 ' - '+lon_0=-95. 
+lat_0=25 +lat_1=25 ' - '+units=m +no_defs') - self.area_def1 = AreaDefinition( - 'test', - 'test', - 'test', - proj_dict, - 100, - 200, - (-1000., -1500., 1000., 1500.), - ) - self.area_def2 = AreaDefinition( - 'test', - 'test', - 'test', - proj_dict, - 200, - 400, - (-1000., -1500., 1000., 1500.), - ) - def test_coarsest_finest_area_upright_area(self): +class TestFinestCoarsestArea: + """Test the Scene logic for finding the finest and coarsest area.""" + + @pytest.mark.parametrize( + ("coarse_area", "fine_area"), + [ + (_create_coarsest_finest_area_def((2, 5), (1000.0, 1500.0, -1000.0, -1500.0)), + _create_coarsest_finest_area_def((4, 10), (1000.0, 1500.0, -1000.0, -1500.0))), + (_create_coarsest_finest_area_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0)), + _create_coarsest_finest_area_def((4, 10), (-1000.0, -1500.0, 1000.0, 1500.0))), + (_create_coarsest_finest_swath_def((2, 5), (1000.0, 1500.0, -1000.0, -1500.0), "1"), + _create_coarsest_finest_swath_def((4, 10), (1000.0, 1500.0, -1000.0, -1500.0), "1")), + ] + ) + def test_coarsest_finest_area_different_shape(self, coarse_area, fine_area): """Test 'coarsest_area' and 'finest_area' methods for upright areas.""" - self.ds1.attrs['area'] = self.area_def1 - self.ds2.attrs['area'] = self.area_def2 - self.ds3.attrs['area'] = self.area_def2 - assert self.scene.coarsest_area() is self.area_def1 - assert self.scene.finest_area() is self.area_def2 - assert self.scene.coarsest_area(['2', '3']) is self.area_def2 - - def test_coarsest_finest_area_flipped_area(self): - """Test 'coarsest_area' and 'finest_area' methods for flipped areas with negative pixel sizes.""" - area_def1_flipped = self.area_def1.copy(area_extent=tuple([-1*ae for ae in self.area_def1.area_extent])) - area_def2_flipped = self.area_def2.copy(area_extent=tuple([-1*ae for ae in self.area_def2.area_extent])) - self.ds1.attrs['area'] = area_def1_flipped - self.ds2.attrs['area'] = area_def2_flipped - self.ds3.attrs['area'] = area_def2_flipped - assert self.scene.coarsest_area() is area_def1_flipped - assert self.scene.finest_area() is area_def2_flipped - assert self.scene.coarsest_area(['2', '3']) is area_def2_flipped + ds1 = _create_coarest_finest_data_array(coarse_area.shape, coarse_area, {"wavelength": (0.1, 0.2, 0.3)}) + ds2 = _create_coarest_finest_data_array(fine_area.shape, fine_area, {"wavelength": (0.4, 0.5, 0.6)}) + ds3 = _create_coarest_finest_data_array(fine_area.shape, fine_area, {"wavelength": (0.7, 0.8, 0.9)}) + scn = Scene() + scn["1"] = ds1 + scn["2"] = ds2 + scn["3"] = ds3 + + assert scn.coarsest_area() is coarse_area + assert scn.finest_area() is fine_area + assert scn.coarsest_area(['2', '3']) is fine_area + + @pytest.mark.parametrize( + ("area_def", "shifted_area"), + [ + (_create_coarsest_finest_area_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0)), + _create_coarsest_finest_area_def((2, 5), (-900.0, -1400.0, 1100.0, 1600.0))), + (_create_coarsest_finest_swath_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0), "1"), + _create_coarsest_finest_swath_def((2, 5), (-900.0, -1400.0, 1100.0, 1600.0), "2")), + ], + ) + def test_coarsest_finest_area_same_shape(self, area_def, shifted_area): + """Test that two areas with the same shape are consistently returned. + + If two geometries (ex. 
two AreaDefinitions or two SwathDefinitions) + have the same resolution (shape) but different + coordinates, which one has the finer resolution would ultimately be + determined by the semi-random ordering of the internal container of + the Scene (a dict) if only pixel resolution was compared. This test + makes sure that it is always the same object returned. + + """ + ds1 = _create_coarest_finest_data_array(area_def.shape, area_def) + ds2 = _create_coarest_finest_data_array(area_def.shape, shifted_area) + scn = Scene() + scn["ds1"] = ds1 + scn["ds2"] = ds2 + coarse_area1 = scn.coarsest_area() + + scn = Scene() + scn["ds2"] = ds2 + scn["ds1"] = ds1 + coarse_area2 = scn.coarsest_area() + # doesn't matter what order they were added, this should be the same area + assert coarse_area2 is coarse_area1 class TestSceneAvailableDatasets: @@ -724,6 +868,29 @@ def test_available_composites_known_versus_all(self): assert not_avail_comp not in avail_comps +class TestSceneSerialization: + """Test the Scene serialization.""" + + def setup_method(self): + """Set config_path to point to test 'etc' directory.""" + self.old_config_path = satpy.config.get('config_path') + satpy.config.set(config_path=[TEST_ETC_DIR]) + + def teardown_method(self): + """Restore previous 'config_path' setting.""" + satpy.config.set(config_path=self.old_config_path) + + def test_serialization_with_readers_and_data_arr(self): + """Test that dask can serialize a Scene with readers.""" + from distributed.protocol import deserialize, serialize + + scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene.load(['ds1']) + cloned_scene = deserialize(*serialize(scene)) + assert scene._readers.keys() == cloned_scene._readers.keys() + assert scene.all_dataset_ids == cloned_scene.all_dataset_ids + + class TestSceneLoading: """Test the Scene objects `.load` method.""" @@ -1339,6 +1506,31 @@ def test_available_comps_no_deps(self): available_comp_ids = scene.available_composite_ids() assert make_cid(name='static_image') in available_comp_ids + def test_compute_pass_through(self): + """Test pass through of xarray compute.""" + import numpy as np + scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene.load(['ds1']) + scene = scene.compute() + assert isinstance(scene['ds1'].data, np.ndarray) + + def test_persist_pass_through(self): + """Test pass through of xarray persist.""" + from dask.array.utils import assert_eq + scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene.load(['ds1']) + scenep = scene.persist() + assert_eq(scene['ds1'].data, scenep['ds1'].data) + assert set(scenep['ds1'].data.dask).issubset(scene['ds1'].data.dask) + assert len(scenep["ds1"].data.dask) == scenep['ds1'].data.npartitions + + def test_chunk_pass_through(self): + """Test pass through of xarray chunk.""" + scene = Scene(filenames=['fake1_1.txt'], reader='fake1') + scene.load(['ds1']) + scene = scene.chunk(chunks=2) + assert scene['ds1'].data.chunksize == (2, 2) + class TestSceneResampling: """Test resampling a Scene to another Scene object.""" @@ -1658,6 +1850,7 @@ def test_comp_loading_multisensor_composite_created_user(self): scene3["ds1"] = scene1["ds1"] scene3["ds4_b"] = scene2["ds4_b"] scene3.load(["comp_multi"]) + assert "comp_multi" in scene3 def test_comps_need_resampling_optional_mod_deps(self): """Test that a composite with complex dependencies.
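The storage-options tests above assume that Scene splits a 'storage_options' entry out of reader_kwargs (either one shared dict or one dict per reader), forwarding the options to fsspec.open_files and only the remaining keyword arguments to load_readers. A minimal sketch of that split, with a hypothetical helper name:

def _split_storage_options(reader_kwargs):
    """Illustrative only: pop fsspec storage options out of reader keyword arguments."""
    cleaned = dict(reader_kwargs or {})
    storage_options = cleaned.pop('storage_options', {})
    return cleaned, storage_options

In the per-reader case the same split would be applied to each nested per-reader dict in turn.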
diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 1ac0941332..b2e53ebd46 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -209,54 +209,67 @@ def test_proj_units_to_meters(self): res = proj_units_to_meters(prj) self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000') - @mock.patch('satpy.utils.warnings.warn') - def test_get_satpos(self, warn_mock): + +class TestGetSatPos: + """Tests for 'get_satpos'.""" + + @pytest.mark.parametrize( + ("included_prefixes", "preference", "expected_result"), + [ + (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), None, (1, 2, 3)), + (("satellite_actual_", "satellite_nominal_", "projection_"), None, (1.1, 2.1, 3)), + (("satellite_nominal_", "projection_"), None, (1.2, 2.2, 3.1)), + (("projection_",), None, (1.3, 2.3, 3.2)), + (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "nadir", (1, 2, 3)), + (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "actual", (1.1, 2.1, 3)), + (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "nominal", (1.2, 2.2, 3.1)), + (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "projection", (1.3, 2.3, 3.2)), + (("satellite_nominal_", "projection_"), "actual", (1.2, 2.2, 3.1)), + (("projection_",), "projection", (1.3, 2.3, 3.2)), + ] + ) + def test_get_satpos(self, included_prefixes, preference, expected_result): """Test getting the satellite position.""" - orb_params = {'nadir_longitude': 1, - 'satellite_actual_longitude': 1.1, - 'satellite_nominal_longitude': 1.2, - 'projection_longitude': 1.3, - 'nadir_latitude': 2, - 'satellite_actual_latitude': 2.1, - 'satellite_nominal_latitude': 2.2, - 'projection_latitude': 2.3, - 'satellite_actual_altitude': 3, - 'satellite_nominal_altitude': 3.1, - 'projection_altitude': 3.2} - dataset = mock.MagicMock(attrs={'orbital_parameters': orb_params, - 'satellite_longitude': -1, - 'satellite_latitude': -2, - 'satellite_altitude': -3}) - - # Nadir - lon, lat, alt = get_satpos(dataset) - self.assertTupleEqual((lon, lat, alt), (1, 2, 3)) - - # Actual - orb_params.pop('nadir_longitude') - orb_params.pop('nadir_latitude') - lon, lat, alt = get_satpos(dataset) - self.assertTupleEqual((lon, lat, alt), (1.1, 2.1, 3)) - - # Nominal - orb_params.pop('satellite_actual_longitude') - orb_params.pop('satellite_actual_latitude') - orb_params.pop('satellite_actual_altitude') - lon, lat, alt = get_satpos(dataset) - self.assertTupleEqual((lon, lat, alt), (1.2, 2.2, 3.1)) - - # Projection - orb_params.pop('satellite_nominal_longitude') - orb_params.pop('satellite_nominal_latitude') - orb_params.pop('satellite_nominal_altitude') - lon, lat, alt = get_satpos(dataset) - self.assertTupleEqual((lon, lat, alt), (1.3, 2.3, 3.2)) - warn_mock.assert_called() - - # Legacy - dataset.attrs.pop('orbital_parameters') - lon, lat, alt = get_satpos(dataset) - self.assertTupleEqual((lon, lat, alt), (-1, -2, -3)) + all_orb_params = { + 'nadir_longitude': 1, + 'satellite_actual_longitude': 1.1, + 'satellite_nominal_longitude': 1.2, + 'projection_longitude': 1.3, + 'nadir_latitude': 2, + 'satellite_actual_latitude': 2.1, + 'satellite_nominal_latitude': 2.2, + 'projection_latitude': 2.3, + 'satellite_actual_altitude': 3, + 'satellite_nominal_altitude': 3.1, + 'projection_altitude': 3.2 + } + orb_params = {key: value for key, value in all_orb_params.items() if + any(in_prefix in key for in_prefix in included_prefixes)} + data_arr = xr.DataArray((), attrs={'orbital_parameters': 
orb_params}) + + with warnings.catch_warnings(record=True) as caught_warnings: + lon, lat, alt = get_satpos(data_arr, preference=preference) + has_satpos_warnings = any("using projection" in str(msg.message) for msg in caught_warnings) + expect_warning = included_prefixes == ("projection_",) and preference != "projection" + if expect_warning: + assert has_satpos_warnings + else: + assert not has_satpos_warnings + assert (lon, lat, alt) == expected_result + + @pytest.mark.parametrize( + "attrs", + ( + {}, + {'orbital_parameters': {'projection_longitude': 1}}, + {'satellite_altitude': 1} + ) + ) + def test_get_satpos_fails_with_informative_error(self, attrs): + """Test that get_satpos raises an informative error message.""" + data_arr = xr.DataArray((), attrs=attrs) + with pytest.raises(KeyError, match="Unable to determine satellite position.*"): + get_satpos(data_arr) def test_make_fake_scene(): @@ -414,3 +427,120 @@ def _verify_unchanged_chunks(data_arrays: list[xr.DataArray], orig_arrays: list[xr.DataArray]) -> None: for data_arr, orig_arr in zip(data_arrays, orig_arrays): assert data_arr.chunks == orig_arr.chunks + + +def test_chunk_pixel_size(): + """Check the chunk pixel size computations.""" + from unittest.mock import patch + + from satpy.utils import get_chunk_pixel_size + with patch("satpy.utils.CHUNK_SIZE", None): + assert get_chunk_pixel_size() is None + with patch("satpy.utils.CHUNK_SIZE", 10): + assert get_chunk_pixel_size() == 100 + with patch("satpy.utils.CHUNK_SIZE", (10, 20)): + assert get_chunk_pixel_size() == 200 + + +def test_chunk_size_limit(): + """Check the chunk size limit computations.""" + from unittest.mock import patch + + from satpy.utils import get_chunk_size_limit + with patch("satpy.utils.CHUNK_SIZE", None): + assert get_chunk_size_limit(np.uint8) is None + with patch("satpy.utils.CHUNK_SIZE", 10): + assert get_chunk_size_limit(np.float64) == 800 + with patch("satpy.utils.CHUNK_SIZE", (10, 20)): + assert get_chunk_size_limit(np.int32) == 800 + + +def test_convert_remote_files_to_fsspec_local_files(): + """Test conversion of remote files to fsspec objects. + + Case without scheme/protocol, which should default to plain filenames. + """ + from satpy.utils import convert_remote_files_to_fsspec + + filenames = ["/tmp/file1.nc", "file:///tmp/file2.nc"] + res = convert_remote_files_to_fsspec(filenames) + assert res == filenames + + +def test_convert_remote_files_to_fsspec_mixed_sources(): + """Test conversion of remote files to fsspec objects. + + Case with mixed local and remote files. + """ + from satpy.readers import FSFile + from satpy.utils import convert_remote_files_to_fsspec + + filenames = ["/tmp/file1.nc", "s3://data-bucket/file2.nc", "file:///tmp/file3.nc"] + res = convert_remote_files_to_fsspec(filenames) + # Two local files, one remote + assert filenames[0] in res + assert filenames[2] in res + assert sum([isinstance(f, FSFile) for f in res]) == 1 + + +def test_convert_remote_files_to_fsspec_filename_dict(): + """Test conversion of remote files to fsspec objects. + + Case where filenames is a dictionary mapping readers and filenames.
+ """ + from satpy.readers import FSFile + from satpy.utils import convert_remote_files_to_fsspec + + filenames = { + "reader1": ["/tmp/file1.nc", "/tmp/file2.nc"], + "reader2": ["s3://tmp/file3.nc", "file:///tmp/file4.nc", "/tmp/file5.nc"] + } + res = convert_remote_files_to_fsspec(filenames) + + assert res["reader1"] == filenames["reader1"] + assert filenames["reader2"][1] in res["reader2"] + assert filenames["reader2"][2] in res["reader2"] + assert sum([isinstance(f, FSFile) for f in res["reader2"]]) == 1 + + +def test_convert_remote_files_to_fsspec_fsfile(): + """Test convertion of remote files to fsspec objects. + + Case where the some of the files are already FSFile objects. + """ + from satpy.readers import FSFile + from satpy.utils import convert_remote_files_to_fsspec + + filenames = ["/tmp/file1.nc", "s3://data-bucket/file2.nc", FSFile("ssh:///tmp/file3.nc")] + res = convert_remote_files_to_fsspec(filenames) + + assert sum([isinstance(f, FSFile) for f in res]) == 2 + + +def test_convert_remote_files_to_fsspec_windows_paths(): + """Test convertion of remote files to fsspec objects. + + Case where windows paths are used. + """ + from satpy.utils import convert_remote_files_to_fsspec + + filenames = [r"C:\wintendo\file1.nc", "e:\\wintendo\\file2.nc", r"wintendo\file3.nc"] + res = convert_remote_files_to_fsspec(filenames) + + assert res == filenames + + +@mock.patch('fsspec.open_files') +def test_convert_remote_files_to_fsspec_storage_options(open_files): + """Test convertion of remote files to fsspec objects. + + Case with storage options given. + """ + from satpy.utils import convert_remote_files_to_fsspec + + filenames = ["s3://tmp/file1.nc"] + storage_options = {'anon': True} + + _ = convert_remote_files_to_fsspec(filenames, storage_options=storage_options) + + open_files.assert_called_once_with(filenames, **storage_options) diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py index e594257fcc..91d51127fe 100644 --- a/satpy/tests/test_yaml_reader.py +++ b/satpy/tests/test_yaml_reader.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2015-2019 Satpy developers +# Copyright (c) 2015-2019, 2021 Satpy developers # # This file is part of satpy. 
# @@ -22,16 +22,62 @@ import unittest from datetime import datetime from tempfile import mkdtemp -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, call, patch import numpy as np import xarray as xr import satpy.readers.yaml_reader as yr from satpy.dataset import DataQuery +from satpy.dataset.dataid import ModifierTuple +from satpy.readers.aapp_mhs_amsub_l1c import FrequencyDoubleSideBand, FrequencyRange from satpy.readers.file_handlers import BaseFileHandler from satpy.tests.utils import make_dataid +MHS_YAML_READER_DICT = { + 'reader': {'name': 'mhs_l1c_aapp', + 'description': 'AAPP l1c Reader for AMSU-B/MHS data', + 'sensors': ['mhs'], + 'default_channels': [1, 2, 3, 4, 5], + 'data_identification_keys': {'name': {'required': True}, + 'frequency_double_sideband': + {'type': FrequencyDoubleSideBand}, + 'frequency_range': {'type': FrequencyRange}, + 'resolution': None, + 'polarization': {'enum': ['H', 'V']}, + 'calibration': {'enum': ['brightness_temperature'], 'transitive': True}, + 'modifiers': {'required': True, + 'default': [], + 'type': ModifierTuple}}, + 'config_files': ('satpy/etc/readers/mhs_l1c_aapp.yaml',)}, + 'datasets': {'1': {'name': '1', + 'frequency_range': {'central': 89.0, 'bandwidth': 2.8, 'unit': 'GHz'}, + 'polarization': 'V', + 'resolution': 16000, + 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, + 'coordinates': ['longitude', 'latitude'], + 'file_type': 'mhs_aapp_l1c'}, + '2': {'name': '2', + 'frequency_range': {'central': 157.0, 'bandwidth': 2.8, 'unit': 'GHz'}, + 'polarization': 'V', + 'resolution': 16000, + 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, + 'coordinates': ['longitude', 'latitude'], + 'file_type': 'mhs_aapp_l1c'}, + '3': {'name': '3', + 'frequency_double_sideband': {'unit': 'GHz', + 'central': 183.31, + 'side': 1.0, + 'bandwidth': 1.0}, + 'polarization': 'V', + 'resolution': 16000, + 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}}, + 'coordinates': ['longitude', 'latitude'], + 'file_type': 'mhs_aapp_l1c'}}, + 'file_types': {'mhs_aapp_l1c': {'file_reader': BaseFileHandler, + 'file_patterns': [ + 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa + class FakeFH(BaseFileHandler): """Fake file handler class.""" @@ -209,6 +255,41 @@ def test_create_filehandlers(self): self.reader.create_filehandlers(filelist) self.assertEqual(len(self.reader.file_handlers['ftype1']), 3) + def test_serializable(self): + """Check that a reader is serializable by dask. + + This ensures users are able to serialize a Scene object that contains + readers. 
+ """ + from distributed.protocol import deserialize, serialize + filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla', + 'abcd.bla', 'k001.bla', 'a003.bli'] + + self.reader.create_filehandlers(filelist) + cloned_reader = deserialize(*serialize(self.reader)) + assert self.reader.file_handlers.keys() == cloned_reader.file_handlers.keys() + assert self.reader.all_ids == cloned_reader.all_ids + + +class TestFileYAMLReaderWithCustomIDKey(unittest.TestCase): + """Test units from FileYAMLReader with custom id_keys.""" + + def setUp(self): + """Set up the test case.""" + self.config = MHS_YAML_READER_DICT + self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT, + filter_parameters={ + 'start_time': datetime(2000, 1, 1), + 'end_time': datetime(2000, 1, 2), + }) + + def test_custom_type_with_dict_contents_gets_parsed_correctly(self): + """Test custom type with dictionary contents gets parsed correctly.""" + ds_ids = list(self.reader.all_dataset_ids) + assert ds_ids[0]["frequency_range"] == FrequencyRange(89., 2.8, "GHz") + + assert ds_ids[2]["frequency_double_sideband"] == FrequencyDoubleSideBand(183.31, 1., 1., "GHz") + class TestFileFileYAMLReader(unittest.TestCase): """Test units from FileYAMLReader.""" @@ -608,7 +689,7 @@ def test_update_ds_ids_from_file_handlers(self): # need to copy this because the dataset infos will be modified _orig_ids = {key: val.copy() for key, val in orig_ids.items()} with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \ - patch.dict(self.reader.available_ids, {}, clear=True): + patch.dict(self.reader.available_ids, {}, clear=True): # Add a file handler with resolution property fh = MagicMock(filetype_info={'file_type': ftype}, resolution=resol) @@ -630,6 +711,7 @@ def test_update_ds_ids_from_file_handlers(self): if ftype in file_types: self.assertEqual(resol, ds_id['resolution']) + # Test methods @@ -831,6 +913,88 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa): np.testing.assert_equal(res.coords['x'], np.arange(3)) np.testing.assert_equal(res.coords['time'], np.flip(np.arange(4))) + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") + def test_load_dataset_with_area_for_swath_def_data(self, ldwa): + """Test _load_dataset_with_area() for swath definition data.""" + from pyresample.geometry import SwathDefinition + + from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader + + reader = GEOFlippableFileYAMLReader() + + dsid = MagicMock() + coords = MagicMock() + + # create a dummy upright xarray + original_array = np.ones(3) + dim = np.arange(3) + lats = np.arange(3) + lons = np.arange(3) + + swath_def = SwathDefinition(lons, lats) + dummy_ds_xr = xr.DataArray(original_array, + coords={'y': dim}, + attrs={'area': swath_def}, + dims=('y',)) + + # assign the dummy xr as return for the super _load_dataset_with_area method + ldwa.return_value = dummy_ds_xr + + # returned dataset should be unchanged since datasets with a swath definition are not flippable + res = reader._load_dataset_with_area(dsid, coords, 'NE') + np.testing.assert_equal(res.values, original_array) + + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch.object(yr.FileYAMLReader, "_load_dataset_with_area") + def test_load_dataset_with_area_for_data_without_area(self, ldwa): + """Test _load_dataset_with_area() for data wihtout area information.""" + from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader + + reader = GEOFlippableFileYAMLReader() + + dsid = MagicMock() + 
coords = MagicMock() + + # create a dummy upright xarray + original_array = np.ones(3) + dim = np.arange(3) + + dummy_ds_xr = xr.DataArray(original_array, + coords={'y': dim}, + attrs={}, + dims=('y',)) + + # assign the dummy xr as return for the super _load_dataset_with_area method + ldwa.return_value = dummy_ds_xr + + # returned dataset should be unchanged since datasets without area information are not flippable + res = reader._load_dataset_with_area(dsid, coords, 'NE') + np.testing.assert_equal(res.values, original_array) + + +def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info): + seg_area = MagicMock() + seg_area.crs = 'some_crs' + seg_area.area_extent = aex + seg_area.shape = ashape + get_area_def = MagicMock() + get_area_def.return_value = seg_area + + get_segment_position_info = MagicMock() + get_segment_position_info.return_value = chk_pos_info + + fh = MagicMock() + filetype_info = {'expected_segments': expected_segments, + 'file_type': 'filetype1'} + filename_info = {'segment': segment} + fh.filetype_info = filetype_info + fh.filename_info = filename_info + fh.get_area_def = get_area_def + fh.get_segment_position_info = get_segment_position_info + + return fh, seg_area + class TestGEOSegmentYAMLReader(unittest.TestCase): """Test GEOSegmentYAMLReader.""" @@ -880,11 +1044,10 @@ def test_get_expected_segments(self, cfh): self.assertEqual(es, 5) @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) - @patch('satpy.readers.yaml_reader._get_empty_segment_with_height') @patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset') @patch('satpy.readers.yaml_reader.xr') @patch('satpy.readers.yaml_reader._find_missing_segments') - def test_load_dataset(self, mss, xr, parent_load_dataset, geswh): + def test_load_dataset(self, mss, xr, parent_load_dataset): """Test _load_dataset().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader reader = GEOSegmentYAMLReader() @@ -967,53 +1130,17 @@ def test_load_dataset(self, mss, xr, parent_load_dataset, geswh): self.assertTrue(slice_list[0] is empty_segment) self.assertTrue(slice_list[1] is empty_segment) - # Check that new FCI empty segment is generated if missing in the middle and at the end - fake_fh = MagicMock() - fake_fh.filename_info = {} - fake_fh.filetype_info = {'file_type': 'fci_l1c_fdhsi'} - empty_segment.shape = (140, 5568) - slice_list[4] = None - counter = 7 - mss.return_value = (counter, expected_segments, slice_list, - failure, projectable) - res = reader._load_dataset(dataid, ds_info, [fake_fh]) - assert 2 == geswh.call_count - # Disable padding res = reader._load_dataset(dataid, ds_info, file_handlers, pad_data=False) parent_load_dataset.assert_called_once_with(dataid, ds_info, file_handlers) - def test_get_empty_segment_with_height(self): - """Test _get_empty_segment_with_height().""" - from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh - - dim = 'y' - - # check expansion of empty segment - empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x']) - new_height = 140 - new_empty_segment = geswh(empty_segment, new_height, dim) - assert new_empty_segment.shape == (140, 5568) - - # check reduction of empty segment - empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) - new_height = 139 - new_empty_segment = geswh(empty_segment, new_height, dim) - assert new_empty_segment.shape == (139, 5568) - - # check that empty segment is not modified if it has the right height already - empty_segment = xr.DataArray(np.ones((140, 5568)), 
dims=['y', 'x']) - new_height = 140 - new_empty_segment = geswh(empty_segment, new_height, dim) - assert new_empty_segment is empty_segment - @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch('satpy.readers.yaml_reader._load_area_def') @patch('satpy.readers.yaml_reader._stack_area_defs') - @patch('satpy.readers.yaml_reader._pad_earlier_segments_area') - @patch('satpy.readers.yaml_reader._pad_later_segments_area') + @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area') + @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area') def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): """Test _load_area_def().""" from satpy.readers.yaml_reader import GEOSegmentYAMLReader @@ -1030,122 +1157,50 @@ def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def): reader._load_area_def(dataid, file_handlers, pad_data=False) parent_load_area_def.assert_called_once_with(dataid, file_handlers) + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_later_segments_area(self, AreaDefinition): """Test _pad_later_segments_area().""" - from satpy.readers.yaml_reader import _pad_later_segments_area as plsa - - seg1_area = MagicMock() - seg1_area.crs = 'some_crs' - seg1_area.area_extent = [0, 1000, 200, 500] - seg1_area.shape = [200, 500] - get_area_def = MagicMock() - get_area_def.return_value = seg1_area - fh_1 = MagicMock() - filetype_info = {'expected_segments': 2} - filename_info = {'segment': 1} - fh_1.filetype_info = filetype_info - fh_1.filename_info = filename_info - fh_1.get_area_def = get_area_def + from satpy.readers.yaml_reader import GEOSegmentYAMLReader + reader = GEOSegmentYAMLReader() + + expected_segments = 2 + segment = 1 + aex = [0, 1000, 200, 500] + ashape = [200, 500] + fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) file_handlers = [fh_1] dataid = 'dataid' - res = plsa(file_handlers, dataid) + res = reader._pad_later_segments_area(file_handlers, dataid) self.assertEqual(len(res), 2) seg2_extent = (0, 1500, 200, 1000) expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, seg2_extent) AreaDefinition.assert_called_once_with(*expected_call) - @patch('satpy.readers.yaml_reader.AreaDefinition') - def test_pad_later_segments_area_for_FCI_padding(self, AreaDefinition): - """Test _pad_later_segments_area() in the FCI padding case.""" - from satpy.readers.yaml_reader import _pad_later_segments_area as plsa - - seg1_area = MagicMock() - seg1_area.crs = 'some_crs' - seg1_area.area_extent = [0, 1000, 200, 500] - seg1_area.shape = [556, 11136] - get_area_def = MagicMock() - get_area_def.return_value = seg1_area - fh_1 = MagicMock() - filetype_info = {'expected_segments': 2, - 'file_type': 'fci_l1c_fdhsi'} - filename_info = {'segment': 1} - fh_1.filetype_info = filetype_info - fh_1.filename_info = filename_info - fh_1.get_area_def = get_area_def - file_handlers = [fh_1] - dataid = 'dataid' - res = plsa(file_handlers, dataid) - self.assertEqual(len(res), 2) - - # the previous chunk size is 556, which is exactly double the size of the FCI chunk 2 size (278) - # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. 
- # The new area extent lower-left row is therefore 1000+250=1250 - seg2_extent = (0, 1250, 200, 1000) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278, - seg2_extent) - AreaDefinition.assert_called_once_with(*expected_call) - + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) @patch('satpy.readers.yaml_reader.AreaDefinition') def test_pad_earlier_segments_area(self, AreaDefinition): """Test _pad_earlier_segments_area().""" - from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa - - seg2_area = MagicMock() - seg2_area.crs = 'some_crs' - seg2_area.area_extent = [0, 1000, 200, 500] - seg2_area.shape = [200, 500] - get_area_def = MagicMock() - get_area_def.return_value = seg2_area - fh_2 = MagicMock() - filetype_info = {'expected_segments': 2} - filename_info = {'segment': 2} - fh_2.filetype_info = filetype_info - fh_2.filename_info = filename_info - fh_2.get_area_def = get_area_def + from satpy.readers.yaml_reader import GEOSegmentYAMLReader + reader = GEOSegmentYAMLReader() + + expected_segments = 2 + segment = 2 + aex = [0, 1000, 200, 500] + ashape = [200, 500] + fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None) + file_handlers = [fh_2] dataid = 'dataid' area_defs = {2: seg2_area} - res = pesa(file_handlers, dataid, area_defs) + res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs) self.assertEqual(len(res), 2) seg1_extent = (0, 500, 200, 0) expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200, seg1_extent) AreaDefinition.assert_called_once_with(*expected_call) - @patch('satpy.readers.yaml_reader.AreaDefinition') - def test_pad_earlier_segments_area_for_FCI_padding(self, AreaDefinition): - """Test _pad_earlier_segments_area() for the FCI case.""" - from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa - - seg2_area = MagicMock() - seg2_area.crs = 'some_crs' - seg2_area.area_extent = [0, 1000, 200, 500] - seg2_area.shape = [278, 5568] - get_area_def = MagicMock() - get_area_def.return_value = seg2_area - fh_2 = MagicMock() - filetype_info = {'expected_segments': 2, - 'file_type': 'fci_l1c_fdhsi'} - filename_info = {'segment': 2} - fh_2.filetype_info = filetype_info - fh_2.filename_info = filename_info - fh_2.get_area_def = get_area_def - file_handlers = [fh_2] - dataid = 'dataid' - area_defs = {2: seg2_area} - res = pesa(file_handlers, dataid, area_defs) - self.assertEqual(len(res), 2) - - # the previous chunk size is 278, which is exactly double the size of the FCI chunk 1 size (139) - # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. 
- # The new area extent lower-left row is therefore 500-250=250 - seg1_extent = (0, 500, 200, 250) - expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139, - seg1_extent) - AreaDefinition.assert_called_once_with(*expected_call) - def test_find_missing_segments(self): """Test _find_missing_segments().""" from satpy.readers.yaml_reader import _find_missing_segments as fms @@ -1188,3 +1243,258 @@ def test_find_missing_segments(self): self.assertEqual(slice_list, [None, projectable, None]) self.assertFalse(failure) self.assertTrue(proj is projectable) + + +class TestGEOVariableSegmentYAMLReader(unittest.TestCase): + """Test GEOVariableSegmentYAMLReader.""" + + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch('satpy.readers.yaml_reader._get_empty_segment_with_height') + @patch('satpy.readers.yaml_reader.xr') + @patch('satpy.readers.yaml_reader._find_missing_segments') + def test_get_empty_segment(self, mss, xr, geswh): + """Test execution of (overridden) get_empty_segment inside _load_dataset.""" + from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader + reader = GEOVariableSegmentYAMLReader() + # Setup input, and output of mocked functions for first segment missing + chk_pos_info = { + '1km': {'start_position_row': 0, + 'end_position_row': 0, + 'segment_height': 0, + 'segment_width': 11136}, + '2km': {'start_position_row': 140, + 'end_position_row': None, + 'segment_height': 278, + 'segment_width': 5568} + } + expected_segments = 2 + segment = 2 + aex = [0, 1000, 200, 500] + ashape = [278, 5568] + fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) + + file_handlers = {'filetype1': [fh_2]} + reader._extract_segment_location_dicts(file_handlers) + + counter = 2 + seg = MagicMock(dims=['y', 'x']) + slice_list = [None, seg] + failure = False + projectable = MagicMock() + empty_segment = MagicMock() + empty_segment.shape = [278, 5568] + xr.full_like.return_value = empty_segment + dataid = MagicMock() + ds_info = MagicMock() + + mss.return_value = (counter, expected_segments, slice_list, + failure, projectable) + reader._load_dataset(dataid, ds_info, [fh_2]) + # the return of get_empty_segment + geswh.assert_called_once_with(empty_segment, 139, dim='y') + + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch('satpy.readers.yaml_reader.AreaDefinition') + def test_pad_earlier_segments_area(self, AreaDefinition): + """Test _pad_earlier_segments_area() for the variable segment case.""" + from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader + reader = GEOVariableSegmentYAMLReader() + # setting to 0 or None values that shouldn't be relevant + chk_pos_info = { + '1km': {'start_position_row': 0, + 'end_position_row': 0, + 'segment_height': 0, + 'segment_width': 11136}, + '2km': {'start_position_row': 140, + 'end_position_row': None, + 'segment_height': 278, + 'segment_width': 5568} + } + expected_segments = 2 + segment = 2 + aex = [0, 1000, 200, 500] + ashape = [278, 5568] + fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) + + file_handlers = {'filetype1': [fh_2]} + reader._extract_segment_location_dicts(file_handlers) + dataid = 'dataid' + area_defs = {2: seg2_area} + res = reader._pad_earlier_segments_area([fh_2], dataid, area_defs) + self.assertEqual(len(res), 2) + + # The later vertical chunk (nr. 
2) size is 278, which is exactly double the size + # of the gap left by the missing first chunk (139, as the second chunk starts at line 140). + # Therefore, the new vertical area extent for the first chunk should be + # half of the previous size (1000-500)/2=250. + # The new area extent lower-left row is therefore 500-250=250 + seg1_extent = (0, 500, 200, 250) + expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139, + seg1_extent) + AreaDefinition.assert_called_once_with(*expected_call) + + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch('satpy.readers.yaml_reader.AreaDefinition') + def test_pad_later_segments_area(self, AreaDefinition): + """Test _pad_later_segments_area() in the variable padding case.""" + from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader + reader = GEOVariableSegmentYAMLReader() + + chk_pos_info = { + '1km': {'start_position_row': None, + 'end_position_row': 11136 - 278, + 'segment_height': 556, + 'segment_width': 11136}, + '2km': {'start_position_row': 0, + 'end_position_row': 0, + 'segment_height': 0, + 'segment_width': 5568}} + + expected_segments = 2 + segment = 1 + aex = [0, 1000, 200, 500] + ashape = [556, 11136] + fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) + file_handlers = {'filetype1': [fh_1]} + reader._extract_segment_location_dicts(file_handlers) + dataid = 'dataid' + res = reader._pad_later_segments_area([fh_1], dataid) + self.assertEqual(len(res), 2) + + # The previous chunk size is 556, which is exactly double the size of the gap left + # by the missing last chunk (278, as the second-to-last chunk ends at line 11136 - 278 ) + # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250. 
+ # The new area extent lower-left row is therefore 1000+250=1250 + seg2_extent = (0, 1250, 200, 1000) + expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278, + seg2_extent) + AreaDefinition.assert_called_once_with(*expected_call) + + @patch.object(yr.FileYAMLReader, "__init__", lambda x: None) + @patch('satpy.readers.yaml_reader.AreaDefinition') + def test_pad_later_segments_area_for_multiple_chunks_gap(self, AreaDefinition): + """Test _pad_later_segments_area() in the variable padding case for multiple gaps with multiple chunks.""" + from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader + reader = GEOVariableSegmentYAMLReader() + + def side_effect_areadef(a, b, c, crs, width, height, aex): + m = MagicMock() + m.shape = [height, width] + m.area_extent = aex + m.crs = crs + return m + + AreaDefinition.side_effect = side_effect_areadef + + chk_pos_info = { + '1km': {'start_position_row': 11136 - 600 - 100 + 1, + 'end_position_row': 11136 - 600, + 'segment_height': 100, + 'segment_width': 11136}, + '2km': {'start_position_row': 0, + 'end_position_row': 0, + 'segment_height': 0, + 'segment_width': 5568}} + expected_segments = 8 + segment = 1 + aex = [0, 1000, 200, 500] + ashape = [100, 11136] + fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) + chk_pos_info = { + '1km': {'start_position_row': 11136 - 300 - 100 + 1, + 'end_position_row': 11136 - 300, + 'segment_height': 100, + 'segment_width': 11136}, + '2km': {'start_position_row': 0, + 'end_position_row': 0, + 'segment_height': 0, + 'segment_width': 5568}} + segment = 4 + fh_4, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) + + chk_pos_info = { + '1km': {'start_position_row': 11136 - 100 + 1, + 'end_position_row': None, + 'segment_height': 100, + 'segment_width': 11136}, + '2km': {'start_position_row': 0, + 'end_position_row': 0, + 'segment_height': 0, + 'segment_width': 5568}} + segment = 8 + fh_8, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info) + + file_handlers = {'filetype1': [fh_1, fh_4, fh_8]} + + reader._extract_segment_location_dicts(file_handlers) + dataid = 'dataid' + res = reader._pad_later_segments_area([fh_1, fh_4, fh_8], dataid) + self.assertEqual(len(res), 8) + + # Regarding the chunk sizes: + # First group of missing chunks: + # The end position row of the gap is the start row of the last available chunk-1: 11136-300-100+1-1=10736 + # The start position row of the gap is the end row of the first available chunk+1: 11136-600+1=10537 + # hence the gap is 10736-10537+1=200 px high + # The 200px have to be split between two missing chunks, the most equal way to do it is with + # sizes 100: 100+100=200 + # Second group: + # The end position row of the gap is the start row of the last chunk -1: 11136-100+1-1=11036 + # The start position row of the gap is the end row of the first chunk +1: 11136-300+1=10837 + # hence the gap is 11036-10837+1=200 px high + # The 200px have to be split between three missing chunks, the most equal way to do it is with + # sizes 66 and 67: 66+67+67=200 + + # Regarding the heights: + # First group: + # The first chunk has 100px height and 500 area extent height. + # The first padded chunk has 100px height -> 500*100/100=500 area extent height ->1000+500=1500 + # The second padded chunk has 100px height -> 500*100/100=500 area extent height ->1500+500=2000 + # Second group: + # The first chunk has 100px height and 500 area extent height.
+ # The first padded chunk has 66px height -> 500*66/100=330 area extent height ->1000+330=1330 + # The second padded chunk has 67px height -> 500*67/100=335 area extent height ->1330+335=1665 + # The third padded chunk has 67px height -> 500*67/100=335 area extent height ->1665+335=2000 + self.assertEqual(AreaDefinition.call_count, 5) + expected_call1 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + (0, 1500.0, 200, 1000)) + expected_call2 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100, + (0, 2000.0, 200, 1500)) + expected_call3 = ('fill', 'fill', 'fill', 'some_crs', 11136, 66, + (0, 1330.0, 200, 1000)) + expected_call4 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67, + (0, 1665.0, 200, 1330.0)) + expected_call5 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67, + (0, 2000.0, 200, 1665.0)) + + AreaDefinition.side_effect = None + AreaDefinition.assert_has_calls([call(*expected_call1), + call(*expected_call2), + call(*expected_call3), + call(*expected_call4), + call(*expected_call5) + ]) + + def test_get_empty_segment_with_height(self): + """Test _get_empty_segment_with_height().""" + from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh + + dim = 'y' + + # check expansion of empty segment + empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x']) + new_height = 140 + new_empty_segment = geswh(empty_segment, new_height, dim) + assert new_empty_segment.shape == (140, 5568) + + # check reduction of empty segment + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + new_height = 139 + new_empty_segment = geswh(empty_segment, new_height, dim) + assert new_empty_segment.shape == (139, 5568) + + # check that empty segment is not modified if it has the right height already + empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x']) + new_height = 140 + new_empty_segment = geswh(empty_segment, new_height, dim) + assert new_empty_segment is empty_segment diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py index fe3bcca1e0..54e2a85a27 100644 --- a/satpy/tests/utils.py +++ b/satpy/tests/utils.py @@ -16,6 +16,7 @@ # along with this program. If not, see <http://www.gnu.org/licenses/>. """Utilities for various satpy tests.""" +from contextlib import contextmanager from datetime import datetime from unittest import mock @@ -284,6 +285,14 @@ def __call__(self, dsk, keys, **kwargs): return dask.get(dsk, keys, **kwargs) +@contextmanager +def assert_maximum_dask_computes(max_computes=1): + """Context manager to make sure dask computations are not executed more than ``max_computes`` times.""" + import dask + with dask.config.set(scheduler=CustomScheduler(max_computes=max_computes)) as new_config: + yield new_config + + def make_fake_scene(content_dict, daskify=False, area=True, common_attrs=None): """Create a fake Scene.
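The assert_maximum_dask_computes helper added above simply installs CustomScheduler through dask.config.set, so the wrapped block fails as soon as dask's scheduler is invoked more often than allowed. A short usage sketch:

import dask.array as da

from satpy.tests.utils import assert_maximum_dask_computes

arr = da.zeros((10, 10), chunks=5)
with assert_maximum_dask_computes(max_computes=1):
    arr.compute()  # a second compute() inside this block would raise (CustomScheduler counts scheduler calls)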
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py index a4819677e7..999d8e0c24 100644 --- a/satpy/tests/writer_tests/test_awips_tiled.py +++ b/satpy/tests/writer_tests/test_awips_tiled.py @@ -19,7 +19,7 @@ import logging import os -import warnings +import shutil from datetime import datetime, timedelta from glob import glob @@ -32,6 +32,9 @@ from satpy.resample import update_resampled_coords +START_TIME = datetime(2018, 1, 1, 12, 0, 0) +END_TIME = START_TIME + timedelta(minutes=20) + def _check_production_location(ds): if 'production_site' in ds.attrs: @@ -69,6 +72,7 @@ def _check_required_common_attributes(ds): assert data_arr.encoding.get('zlib', False) assert 'grid_mapping' in data_arr.attrs assert data_arr.attrs['grid_mapping'] in ds + assert 'units' in data_arr.attrs def _check_scaled_x_coordinate_variable(ds, masked_ds): @@ -77,7 +81,7 @@ def _check_scaled_x_coordinate_variable(ds, masked_ds): np.testing.assert_equal(np.diff(x_coord), 1) x_attrs = x_coord.attrs assert x_attrs.get('standard_name') == 'projection_x_coordinate' - assert x_attrs.get('units') == 'meter' + assert x_attrs.get('units') == 'meters' assert 'scale_factor' in x_attrs assert x_attrs['scale_factor'] > 0 assert 'add_offset' in x_attrs @@ -92,7 +96,7 @@ def _check_scaled_y_coordinate_variable(ds, masked_ds): np.testing.assert_equal(np.diff(y_coord), 1) y_attrs = y_coord.attrs assert y_attrs.get('standard_name') == 'projection_y_coordinate' - assert y_attrs.get('units') == 'meter' + assert y_attrs.get('units') == 'meters' assert 'scale_factor' in y_attrs assert y_attrs['scale_factor'] < 0 assert 'add_offset' in y_attrs @@ -101,76 +105,75 @@ def _check_scaled_y_coordinate_variable(ds, masked_ds): assert (np.diff(unscaled_y) < 0).all() +def _get_test_area(shape=(200, 100), crs=None, extents=None): + from pyresample.geometry import AreaDefinition + if crs is None: + crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') + if extents is None: + extents = (-1000., -1500., 1000., 1500.) 
+ area_def = AreaDefinition( + 'test', + 'test', + 'test', + crs, + shape[1], + shape[0], + extents, + ) + return area_def + + +def _get_test_data(shape=(200, 100), chunks=50): + data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) + return da.from_array(data, chunks=chunks) + + +def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None): + attrs = dict( + name='test_ds', + platform_name='PLAT', + sensor='SENSOR', + units='1', + standard_name='toa_bidirectional_reflectance', + area=area_def, + start_time=START_TIME, + end_time=END_TIME + ) + if extra_attrs: + attrs.update(extra_attrs) + ds = xr.DataArray( + dask_arr, + dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'), + attrs=attrs, + ) + return update_resampled_coords(ds, ds, area_def) + + class TestAWIPSTiledWriter: """Test basic functionality of AWIPS Tiled writer.""" - def setup_method(self): - """Create temporary directory to save files to.""" - import tempfile - self.base_dir = tempfile.mkdtemp() - self.start_time = datetime(2018, 1, 1, 12, 0, 0) - self.end_time = self.start_time + timedelta(minutes=20) - - def teardown_method(self): - """Remove the temporary directory created for a test.""" - try: - import shutil - shutil.rmtree(self.base_dir, ignore_errors=True) - except OSError: - pass - - def test_init(self): + def test_init(self, tmp_path): """Test basic init method of writer.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - AWIPSTiledWriter(base_dir=self.base_dir) - - def _get_test_area(self, shape=(200, 100), crs=None, extents=None): - from pyresample.geometry import AreaDefinition - if crs is None: - crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs') - if extents is None: - extents = (-1000., -1500., 1000., 1500.) 
- area_def = AreaDefinition( - 'test', - 'test', - 'test', - crs, - shape[1], - shape[0], - extents, - ) - return area_def - - def _get_test_data(self, shape=(200, 100), chunks=50): - data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) - return da.from_array(data, chunks=chunks) - - def _get_test_lcc_data(self, dask_arr, area_def): - ds = xr.DataArray( - dask_arr, - dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'), - attrs=dict( - name='test_ds', - platform_name='PLAT', - sensor='SENSOR', - units='1', - standard_name='toa_bidirectional_reflectance', - area=area_def, - start_time=self.start_time, - end_time=self.end_time) - ) - return update_resampled_coords(ds, ds, area_def) + AWIPSTiledWriter(base_dir=str(tmp_path)) @pytest.mark.parametrize('use_save_dataset', [(False,), (True,)]) - def test_basic_numbered_1_tile(self, use_save_dataset, caplog): + @pytest.mark.parametrize( + ('extra_attrs', 'expected_filename'), + [ + ({}, 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc'), + ({'sensor': 'viirs', 'name': 'I01'}, 'TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc'), + ] + ) + def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_dataset, caplog, tmp_path): """Test creating a single numbered tile.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - data = self._get_test_data() - area_def = self._get_test_area() - input_data_arr = self._get_test_lcc_data(data, area_def) + data = _get_test_data() + area_def = _get_test_area() + input_data_arr = _get_test_lcc_data(data, area_def, extra_attrs) with caplog.at_level(logging.DEBUG): - w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) + w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) if use_save_dataset: w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') else: @@ -178,9 +181,9 @@ def test_basic_numbered_1_tile(self, use_save_dataset, caplog): assert "no routine matching" not in caplog.text assert "Can't format string" not in caplog.text - all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) assert len(all_files) == 1 - assert os.path.basename(all_files[0]) == 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc' + assert os.path.basename(all_files[0]) == expected_filename for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) output_ds = xr.open_dataset(fn, mask_and_scale=True) @@ -189,20 +192,16 @@ def test_basic_numbered_1_tile(self, use_save_dataset, caplog): np.testing.assert_allclose(input_data_arr.values, output_ds['data'].data, atol=scale_factor / 2) - def test_units_length_warning(self): + def test_units_length_warning(self, tmp_path): """Test long 'units' warnings are raised.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - data = self._get_test_data() - area_def = self._get_test_area() - input_data_arr = self._get_test_lcc_data(data, area_def) + data = _get_test_data() + area_def = _get_test_area() + input_data_arr = _get_test_lcc_data(data, area_def) input_data_arr.attrs["units"] = "this is a really long units string" - w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) - with warnings.catch_warnings(record=True) as caught_warnings: + w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) + with pytest.warns(UserWarning, match=r'.*this is a really long units string.*too long.*'): w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS') - assert len(caught_warnings) == 1 - 
warn_msg = caught_warnings[0].message.args[0]
-            assert "too long" in warn_msg
-            assert "this is a really long units string" in warn_msg

     @pytest.mark.parametrize(
         ("tile_count", "tile_size"),
         [
@@ -212,14 +211,14 @@ def test_units_length_warning(self):
             (None, None),
         ]
     )
-    def test_basic_numbered_tiles(self, tile_count, tile_size):
+    def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path):
         """Test creating multiple numbered tiles."""
         from satpy.tests.utils import CustomScheduler
         from satpy.writers.awips_tiled import AWIPSTiledWriter
-        data = self._get_test_data()
-        area_def = self._get_test_area()
-        input_data_arr = self._get_test_lcc_data(data, area_def)
-        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
+        data = _get_test_data()
+        area_def = _get_test_area()
+        input_data_arr = _get_test_lcc_data(data, area_def)
+        w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
         save_kwargs = dict(
             sector_id='TEST',
             source_name="TESTS",
@@ -236,7 +235,7 @@ def test_basic_numbered_tiles(self, tile_count, tile_size):
         with dask.config.set(scheduler=CustomScheduler(1 * 2)):  # precompute=*2
             w.save_datasets([input_data_arr], **save_kwargs)
-        all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+        all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
         expected_num_files = 0 if should_error else 9
         assert len(all_files) == expected_num_files
         for fn in all_files:
@@ -249,72 +248,63 @@ def test_basic_numbered_tiles(self, tile_count, tile_size):
         stime = input_data_arr.attrs['start_time']
         assert unmasked_ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S')

-    def test_basic_lettered_tiles(self):
+    def test_basic_lettered_tiles(self, tmp_path):
         """Test creating a lettered grid."""
-        import xarray as xr
-
         from satpy.writers.awips_tiled import AWIPSTiledWriter
-        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
-        data = self._get_test_data(shape=(2000, 1000), chunks=500)
-        area_def = self._get_test_area(shape=(2000, 1000),
-                                       extents=(-1000000., -1500000., 1000000., 1500000.))
-        ds = self._get_test_lcc_data(data, area_def)
+        w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+        data = _get_test_data(shape=(2000, 1000), chunks=500)
+        area_def = _get_test_area(shape=(2000, 1000),
+                                  extents=(-1000000., -1500000., 1000000., 1500000.))
+        ds = _get_test_lcc_data(data, area_def)
         # tile_count should be ignored since we specified lettered_grid
         w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
-        all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+        all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
         assert len(all_files) == 16
         for fn in all_files:
             unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
             masked_ds = xr.open_dataset(fn, mask_and_scale=True)
             check_required_properties(unmasked_ds, masked_ds)
-            assert masked_ds.attrs['start_date_time'] == self.start_time.strftime('%Y-%m-%dT%H:%M:%S')
+            assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S')

-    def test_basic_lettered_tiles_diff_projection(self):
+    def test_basic_lettered_tiles_diff_projection(self, tmp_path):
         """Test creating a lettered grid from data with differing projection."""
-        import xarray as xr
-
         from satpy.writers.awips_tiled import AWIPSTiledWriter
-        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
+        w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
         crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95.
+lat_0=45 +lat_1=45 +units=m +no_defs") - data = self._get_test_data(shape=(2000, 1000), chunks=500) - area_def = self._get_test_area(shape=(2000, 1000), crs=crs, - extents=(-1000000., -1500000., 1000000., 1500000.)) - ds = self._get_test_lcc_data(data, area_def) + data = _get_test_data(shape=(2000, 1000), chunks=500) + area_def = _get_test_area(shape=(2000, 1000), crs=crs, + extents=(-1000000., -1500000., 1000000., 1500000.)) + ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) - all_files = sorted(glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))) + all_files = sorted(glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))) assert len(all_files) == 24 assert "TC02" in all_files[0] # the first tile should be TC02 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - assert masked_ds.attrs['start_date_time'] == self.start_time.strftime('%Y-%m-%dT%H:%M:%S') + assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S') - def test_lettered_tiles_update_existing(self): + def test_lettered_tiles_update_existing(self, tmp_path): """Test updating lettered tiles with additional data.""" - import shutil - - import dask - import xarray as xr - from satpy.writers.awips_tiled import AWIPSTiledWriter - first_base_dir = os.path.join(self.base_dir, 'first') + first_base_dir = os.path.join(str(tmp_path), 'first') w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True) shape = (2000, 1000) data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape) # pixels to be filled in later data[:, -200:] = np.nan data = da.from_array(data, chunks=500) - area_def = self._get_test_area(shape=(2000, 1000), - extents=(-1000000., -1500000., 1000000., 1500000.)) - ds = self._get_test_lcc_data(data, area_def) + area_def = _get_test_area(shape=(2000, 1000), + extents=(-1000000., -1500000., 1000000., 1500000.)) + ds = _get_test_lcc_data(data, area_def) # tile_count should be ignored since we specified lettered_grid w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc'))) assert len(all_files) == 16 first_files = [] - second_base_dir = os.path.join(self.base_dir, 'second') + second_base_dir = os.path.join(str(tmp_path), 'second') os.makedirs(second_base_dir) for fn in all_files: new_fn = fn.replace(first_base_dir, second_base_dir) @@ -323,15 +313,15 @@ def test_lettered_tiles_update_existing(self): # Second writing/updating # Area is about 100 pixels to the right - area_def2 = self._get_test_area(shape=(2000, 1000), - extents=(-800000., -1500000., 1200000., 1500000.)) + area_def2 = _get_test_area(shape=(2000, 1000), + extents=(-800000., -1500000., 1200000., 1500000.)) data2 = np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)) # a gap at the beginning where old values remain data2[:, :200] = np.nan # a gap at the end where old values remain data2[:, -400:-300] = np.nan data2 = da.from_array(data2, chunks=500) - ds2 = self._get_test_lcc_data(data2, area_def2) + ds2 = _get_test_lcc_data(data2, area_def2) w = AWIPSTiledWriter(base_dir=second_base_dir, compress=True) # HACK: The _copy_to_existing function hangs when opening the output # file multiple times...sometimes. 
If we limit dask to one worker @@ -363,62 +353,60 @@ def test_lettered_tiles_update_existing(self): assert np.isnan(orig_data[:, 200:]).all() assert not np.isnan(new_data[:, 200:]).all() - def test_lettered_tiles_sector_ref(self): + def test_lettered_tiles_sector_ref(self, tmp_path): """Test creating a lettered grid using the sector as reference.""" - import xarray as xr - from satpy.writers.awips_tiled import AWIPSTiledWriter - w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) - data = self._get_test_data(shape=(2000, 1000), chunks=500) - area_def = self._get_test_area(shape=(2000, 1000), - extents=(-1000000., -1500000., 1000000., 1500000.)) - ds = self._get_test_lcc_data(data, area_def) + w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) + data = _get_test_data(shape=(2000, 1000), chunks=500) + area_def = _get_test_area(shape=(2000, 1000), + extents=(-1000000., -1500000., 1000000., 1500000.)) + ds = _get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", lettered_grid=True, use_sector_reference=True, use_end_time=True) - all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) assert len(all_files) == 16 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) - expected_start = (self.start_time + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') + expected_start = (START_TIME + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S') assert masked_ds.attrs['start_date_time'] == expected_start - def test_lettered_tiles_no_fit(self): + def test_lettered_tiles_no_fit(self, tmp_path): """Test creating a lettered grid with no data overlapping the grid.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) - data = self._get_test_data(shape=(2000, 1000), chunks=500) - area_def = self._get_test_area(shape=(2000, 1000), - extents=(4000000., 5000000., 5000000., 6000000.)) - ds = self._get_test_lcc_data(data, area_def) + w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) + data = _get_test_data(shape=(2000, 1000), chunks=500) + area_def = _get_test_area(shape=(2000, 1000), + extents=(4000000., 5000000., 5000000., 6000000.)) + ds = _get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc')) + all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')) assert not all_files - def test_lettered_tiles_no_valid_data(self): + def test_lettered_tiles_no_valid_data(self, tmp_path): """Test creating a lettered grid with no valid data.""" from satpy.writers.awips_tiled import AWIPSTiledWriter - w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True) + w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True) data = da.full((2000, 1000), np.nan, chunks=500, dtype=np.float32) - area_def = self._get_test_area(shape=(2000, 1000), - extents=(-1000000., -1500000., 1000000., 1500000.)) - ds = self._get_test_lcc_data(data, area_def) + area_def = _get_test_area(shape=(2000, 1000), + extents=(-1000000., -1500000., 1000000., 1500000.)) + ds = _get_test_lcc_data(data, area_def) w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True) # No files created - all NaNs should result in no tiles 
being created
-        all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+        all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
         assert not all_files

-    def test_lettered_tiles_bad_filename(self):
+    def test_lettered_tiles_bad_filename(self, tmp_path):
         """Test creating a lettered grid with a bad filename."""
         from satpy.writers.awips_tiled import AWIPSTiledWriter
-        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True, filename="{Bad Key}.nc")
-        data = self._get_test_data(shape=(2000, 1000), chunks=500)
-        area_def = self._get_test_area(shape=(2000, 1000),
-                                       extents=(-1000000., -1500000., 1000000., 1500000.))
-        ds = self._get_test_lcc_data(data, area_def)
+        w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True, filename="{Bad Key}.nc")
+        data = _get_test_data(shape=(2000, 1000), chunks=500)
+        area_def = _get_test_area(shape=(2000, 1000),
+                                  extents=(-1000000., -1500000., 1000000., 1500000.))
+        ds = _get_test_lcc_data(data, area_def)
         with pytest.raises(KeyError):
             w.save_datasets([ds],
                             sector_id='LCC',
@@ -426,26 +414,24 @@ def test_lettered_tiles_bad_filename(self):
                             tile_count=(3, 3),
                             lettered_grid=True)

-    def test_basic_numbered_tiles_rgb(self):
+    def test_basic_numbered_tiles_rgb(self, tmp_path):
         """Test creating multiple numbered tiles with RGB."""
-        import xarray as xr
-
         from satpy.writers.awips_tiled import AWIPSTiledWriter
-        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
+        w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
         data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50)
-        area_def = self._get_test_area()
-        ds = self._get_test_lcc_data(data, area_def)
+        area_def = _get_test_area()
+        ds = _get_test_lcc_data(data, area_def)
         ds = ds.rename(dict((old, new) for old, new in zip(ds.dims, ['bands', 'y', 'x'])))
         ds.coords['bands'] = ['R', 'G', 'B']
         w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3))
-        chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_R*.nc'))
+        chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_R*.nc'))
         all_files = chan_files[:]
         assert len(chan_files) == 9
-        chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_G*.nc'))
+        chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_G*.nc'))
         all_files.extend(chan_files)
         assert len(chan_files) == 9
-        chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_B*.nc'))
+        chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_B*.nc'))
         assert len(chan_files) == 9
         all_files.extend(chan_files)
         for fn in all_files:
@@ -466,18 +452,14 @@ def test_basic_numbered_tiles_rgb(self):
             {'environment_prefix': 'BB',
              'filename': '{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc'},
         ]
     )
-    def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs):
+    def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path):
         """Test creating tiles with multiple variables."""
-        import os
-
-        import xarray as xr
-
         from satpy.writers.awips_tiled import AWIPSTiledWriter
         os.environ['ORGANIZATION'] = '1' * 50
-        w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
-        data = self._get_test_data()
-        area_def = self._get_test_area()
-        ds1 = self._get_test_lcc_data(data, area_def)
+        w = AWIPSTiledWriter(base_dir=tmp_path, compress=True)
+        data = _get_test_data()
+        area_def = _get_test_area()
+        ds1 = _get_test_lcc_data(data, area_def)
         ds1.attrs.update(
             dict(
                 name='total_energy',
@@ -509,16 +491,16 @@ def test_multivar_numbered_tiles_glm(self, sector,
extra_kwargs): tile_count=(3, 3), template='glm_l2_rad{}'.format(sector.lower()), **extra_kwargs) fn_glob = self._get_glm_glob_filename(extra_kwargs) - all_files = glob(os.path.join(self.base_dir, fn_glob)) + all_files = glob(os.path.join(str(tmp_path), fn_glob)) assert len(all_files) == 9 for fn in all_files: unmasked_ds = xr.open_dataset(fn, mask_and_scale=False) masked_ds = xr.open_dataset(fn, mask_and_scale=True) check_required_properties(unmasked_ds, masked_ds) if sector == 'C': - assert masked_ds.attrs['time_coverage_end'] == self.end_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ') + assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%S.%fZ') else: # 'F' - assert masked_ds.attrs['time_coverage_end'] == self.end_time.strftime('%Y-%m-%dT%H:%M:%SZ') + assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%SZ') @staticmethod def _get_glm_glob_filename(extra_kwargs): diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 6d3ca1929b..508c7d8a59 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -293,6 +293,24 @@ def test_single_time_value(self): bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]') np.testing.assert_array_equal(f['time_bnds'], bounds_exp) + def test_time_coordinate_on_a_swath(self): + """Test that time dimension is not added on swath data with time already as a coordinate.""" + import xarray as xr + + from satpy import Scene + scn = Scene() + test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]]) + times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01', + '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64) + scn['test-array'] = xr.DataArray(test_array, + dims=['y', 'x'], + coords={'time': ('y', times)}, + attrs=dict(start_time=times[0], end_time=times[-1])) + with TempFile() as filename: + scn.save_datasets(filename=filename, writer='cf', pretty=True) + with xr.open_dataset(filename, decode_cf=True) as f: + np.testing.assert_array_equal(f['time'], scn['test-array']['time']) + def test_bounds(self): """Test setting time bounds.""" import xarray as xr diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py index 158d864cea..47cab92047 100644 --- a/satpy/tests/writer_tests/test_geotiff.py +++ b/satpy/tests/writer_tests/test_geotiff.py @@ -17,154 +17,194 @@ # satpy. If not, see . 
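For context on `test_time_coordinate_on_a_swath` above: swath data carries one timestamp per scan line as a `time` coordinate along `y`, so the CF writer must not also add a scalar `time` dimension. The guard implementing this lands in the `cf_writer.py` hunk later in this diff; a self-contained illustration with made-up data:

```python
import numpy as np
import xarray as xr

# Swath-style array: 'time' is a per-scanline coordinate along 'y',
# not a dimension of its own.
times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01'],
                 dtype='datetime64[ns]')
swath = xr.DataArray(np.zeros((2, 3)), dims=('y', 'x'),
                     coords={'time': ('y', times)})

# Guard from _add_time_dimension(): only expand when 'time' is not a
# dimension AND its size does not match an existing dimension.
if 'time' not in swath.dims and swath['time'].size not in swath.shape:
    swath = swath.expand_dims('time')

assert 'time' not in swath.dims  # swath case: no extra dimension added
```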
"""Tests for the geotiff writer.""" -import unittest +from datetime import datetime from unittest import mock +import dask.array as da import numpy as np - - -class TestGeoTIFFWriter(unittest.TestCase): +import pytest +import xarray as xr + + +def _get_test_datasets_2d(): + """Create a single 2D test dataset.""" + ds1 = xr.DataArray( + da.zeros((100, 200), chunks=50), + dims=('y', 'x'), + attrs={'name': 'test', + 'start_time': datetime.utcnow()} + ) + return [ds1] + + +def _get_test_datasets_2d_nonlinear_enhancement(): + data_arrays = _get_test_datasets_2d() + enh_history = [ + {"gamma": 2.0}, + ] + for data_arr in data_arrays: + data_arr.attrs["enhancement_history"] = enh_history + return data_arrays + + +def _get_test_datasets_3d(): + """Create a single 3D test dataset.""" + ds1 = xr.DataArray( + da.zeros((3, 100, 200), chunks=50), + dims=('bands', 'y', 'x'), + coords={'bands': ['R', 'G', 'B']}, + attrs={'name': 'test', + 'start_time': datetime.utcnow()} + ) + return [ds1] + + +class TestGeoTIFFWriter: """Test the GeoTIFF Writer class.""" - def setUp(self): - """Create temporary directory to save files to.""" - import tempfile - self.base_dir = tempfile.mkdtemp() - - def tearDown(self): - """Remove the temporary directory created for a test.""" - try: - import shutil - shutil.rmtree(self.base_dir, ignore_errors=True) - except OSError: - pass - - def _get_test_datasets(self): - """Create a single test dataset.""" - from datetime import datetime - - import dask.array as da - import xarray as xr - ds1 = xr.DataArray( - da.zeros((100, 200), chunks=50), - dims=('y', 'x'), - attrs={'name': 'test', - 'start_time': datetime.utcnow()} - ) - return [ds1] - def test_init(self): """Test creating the writer with no arguments.""" from satpy.writers.geotiff import GeoTIFFWriter GeoTIFFWriter() - def test_simple_write(self): + @pytest.mark.parametrize( + "input_func", + [ + _get_test_datasets_2d, + _get_test_datasets_3d + ] + ) + def test_simple_write(self, input_func, tmp_path): """Test basic writer operation.""" from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir) + datasets = input_func() + w = GeoTIFFWriter(base_dir=tmp_path) w.save_datasets(datasets) - def test_simple_delayed_write(self): + def test_simple_delayed_write(self, tmp_path): """Test writing can be delayed.""" - import dask.array as da - from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir) + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path) # when we switch to rio_save on XRImage then this will be sources # and targets res = w.save_datasets(datasets, compute=False) # this will fail if rasterio isn't installed - self.assertIsInstance(res, tuple) + assert isinstance(res, tuple) # two lists, sources and destinations - self.assertEqual(len(res), 2) - self.assertIsInstance(res[0], list) - self.assertIsInstance(res[1], list) - self.assertIsInstance(res[0][0], da.Array) + assert len(res) == 2 + assert isinstance(res[0], list) + assert isinstance(res[1], list) + assert isinstance(res[0][0], da.Array) da.store(res[0], res[1]) for target in res[1]: if hasattr(target, 'close'): target.close() - def test_colormap_write(self): + def test_colormap_write(self, tmp_path): """Test writing an image with a colormap.""" from trollimage.colormap import spectral from trollimage.xrimage import XRImage from satpy.writers.geotiff import GeoTIFFWriter - datasets = 
self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir) + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path) # we'd have to customize enhancements to test this through # save_datasets. We'll use `save_image` as a workaround. img = XRImage(datasets[0]) img.palettize(spectral) w.save_image(img, keep_palette=True) - def test_float_write(self): + def test_float_write(self, tmp_path): """Test that geotiffs can be written as floats. NOTE: Does not actually check that the output is floats. """ from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir, + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.float32) w.save_datasets(datasets) - def test_dtype_for_enhance_false(self): + def test_dtype_for_enhance_false(self, tmp_path): """Test that dtype of dataset is used if parameters enhance=False and dtype=None.""" from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir, enhance=False) + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path, enhance=False) with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - self.assertEqual(save_method.call_args[1]['dtype'], np.float64) + assert save_method.call_args[1]['dtype'] == np.float64 - def test_dtype_for_enhance_false_and_given_dtype(self): + def test_dtype_for_enhance_false_and_given_dtype(self, tmp_path): """Test that dtype of dataset is used if enhance=False and dtype=uint8.""" from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir, enhance=False, dtype=np.uint8) + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.uint8) with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - self.assertEqual(save_method.call_args[1]['dtype'], np.uint8) + assert save_method.call_args[1]['dtype'] == np.uint8 - def test_fill_value_from_config(self): + def test_fill_value_from_config(self, tmp_path): """Test fill_value coming from the writer config.""" from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(base_dir=self.base_dir) + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, compute=False) - self.assertEqual(save_method.call_args[1]['fill_value'], 128) + assert save_method.call_args[1]['fill_value'] == 128 - def test_tags(self): + def test_tags(self, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir) + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None w.save_datasets(datasets, tags={'test2': 2}, compute=False) called_tags = save_method.call_args[1]['tags'] - self.assertDictEqual(called_tags, {'test1': 1, 'test2': 2}) - - def test_scale_offset(self): + assert called_tags == {'test1': 1, 'test2': 2} + + 
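A note on the assertion style in this file: rather than writing real GeoTIFFs, the tests patch `satpy.writers.XRImage.save` and inspect the keyword arguments the writer forwarded to it. A generic, self-contained sketch of that pattern (class and function names here are hypothetical):

```python
from unittest import mock


class FakeImage:
    """Hypothetical stand-in for trollimage's XRImage."""

    def save(self, filename, dtype=None):
        raise IOError("a real save would touch the filesystem")


def writer_under_test(image, filename):
    # Stand-in for the writer: forwards a computed default dtype to save().
    image.save(filename, dtype="float64")


def test_dtype_forwarded():
    # Patch save() so nothing is written, then assert on the keyword
    # arguments that were actually passed through.
    with mock.patch.object(FakeImage, "save") as save_method:
        save_method.return_value = None
        writer_under_test(FakeImage(), "out.tif")
    assert save_method.call_args[1]["dtype"] == "float64"
```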
@pytest.mark.parametrize( + "input_func", + [ + _get_test_datasets_2d, + _get_test_datasets_3d, + _get_test_datasets_2d_nonlinear_enhancement, + ] + ) + @pytest.mark.parametrize( + "save_kwargs", + [ + {"include_scale_offset": True}, + {"scale_offset_tags": ("scale", "offset")}, + ] + ) + def test_scale_offset(self, input_func, save_kwargs, tmp_path): """Test tags being added.""" from satpy.writers.geotiff import GeoTIFFWriter - datasets = self._get_test_datasets() - w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir) + datasets = input_func() + w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path) w.info['fill_value'] = 128 with mock.patch('satpy.writers.XRImage.save') as save_method: save_method.return_value = None - w.save_datasets(datasets, tags={'test2': 2}, compute=False, include_scale_offset=True) - called_include = save_method.call_args[1]['include_scale_offset_tags'] - self.assertTrue(called_include) + w.save_datasets(datasets, tags={'test2': 2}, compute=False, **save_kwargs) + kwarg_name = "include_scale_offset_tags" if "include_scale_offset" in save_kwargs else "scale_offset_tags" + kwarg_value = save_method.call_args[1].get(kwarg_name) + assert kwarg_value is not None + + def test_tiled_value_from_config(self, tmp_path): + """Test tiled value coming from the writer config.""" + from satpy.writers.geotiff import GeoTIFFWriter + datasets = _get_test_datasets_2d() + w = GeoTIFFWriter(base_dir=tmp_path) + with mock.patch('satpy.writers.XRImage.save') as save_method: + save_method.return_value = None + w.save_datasets(datasets, compute=False) + assert save_method.call_args[1]['tiled'] diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index 573e47afd7..a339edaa8b 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -183,6 +183,20 @@ def test_image_small_mid_atlantic_L(test_area_tiny_eqc_sphere): return get_enhanced_image(arr) +@pytest.fixture(scope="module") +def test_image_small_mid_atlantic_K_L(test_area_tiny_eqc_sphere): + """Get a small test image in units K, mode L, over Atlantic.""" + arr = xr.DataArray( + _get_fake_da(-80+273.15, 40+273.15, test_area_tiny_eqc_sphere.shape + (1,)), + dims=("y", "x", "bands"), + attrs={ + "name": "test-small-mid-atlantic", + "start_time": datetime.datetime(1985, 8, 13, 13, 0), + "area": test_area_tiny_eqc_sphere, + "units": "K"}) + return get_enhanced_image(arr) + + @pytest.fixture(scope="module") def test_image_large_asia_RGB(test_area_small_eqc_wgs84): """Get a large-ish test image in mode RGB, over Asia.""" @@ -365,6 +379,22 @@ def ntg_weird(test_image_weird): SatelliteNameID=6500014) +@pytest.fixture(scope="module") +def ntg_no_fill_value(test_image_small_mid_atlantic_L): + """Create instance of NinJoTagGenerator class.""" + from satpy.writers.ninjogeotiff import NinJoTagGenerator + return NinJoTagGenerator( + test_image_small_mid_atlantic_L, + None, + "bulgur.tif", + ChannelID=900015, + DataType="GORN", + PhysicUnit="C", + PhysicValue="Temperature", + SatelliteNameID=6400014, + DataSource="dowsing rod") + + @pytest.fixture(scope="module") def ntg_rgba(test_image_rgba_merc): """Create NinJoTagGenerator instance with RGBA image.""" @@ -509,6 +539,7 @@ def test_write_and_read_file_LA(test_image_latlon, tmp_path): np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.30816176470588236) np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -49.603125) assert tgs["ninjo_PhysicValue"] == "Reflectance" + 
assert tgs["ninjo_TransparentPixel"] == "-1" # meaning not set def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path): @@ -539,6 +570,63 @@ def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path): assert tgs["ninjo_DataSource"] == "dowsing rod" +def test_write_and_read_file_units( + test_image_small_mid_atlantic_K_L, tmp_path, caplog): + """Test that it writes a GeoTIFF with the appropriate NinJo-tags and units.""" + import rasterio + + from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter + fn = os.fspath(tmp_path / "test.tif") + ngtw = NinJoGeoTIFFWriter() + with caplog.at_level(logging.DEBUG): + ngtw.save_dataset( + test_image_small_mid_atlantic_K_L.data, + filename=fn, + fill_value=0, + blockxsize=128, + blockysize=128, + compress="lzw", + predictor=2, + PhysicUnit="C", + PhysicValue="Temperature", + SatelliteNameID=6400014, + ChannelID=900015, + DataType="GORN", + DataSource="dowsing rod") + assert "Adding offset for K → °C conversion" in caplog.text + # a better test would be to check that the attributes haven't changed at + # all, but that currently fails due to + # https://github.com/pytroll/satpy/issues/2022 + assert test_image_small_mid_atlantic_K_L.data.attrs["enhancement_history"][0] != {"scale": 1, "offset": 273.15} + src = rasterio.open(fn) + tgs = src.tags() + assert tgs["ninjo_FileName"] == fn + assert tgs["ninjo_DataSource"] == "dowsing rod" + np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), + 0.465379, rtol=1e-5) + np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), + -79.86838) + fn2 = os.fspath(tmp_path / "test2.tif") + with caplog.at_level(logging.WARNING): + ngtw.save_dataset( + test_image_small_mid_atlantic_K_L.data, + filename=fn2, + fill_value=0, + blockxsize=128, + blockysize=128, + compress="lzw", + predictor=2, + PhysicUnit="F", + PhysicValue="Temperature", + SatelliteNameID=6400014, + ChannelID=900015, + DataType="GORN", + DataSource="dowsing rod") + assert ("Writing F to ninjogeotiff headers, but " + "data attributes have unit K. " + "No conversion applied.") in caplog.text + + def test_write_and_read_via_scene(test_image_small_mid_atlantic_L, tmp_path): """Test that all attributes are written also when writing from scene. @@ -774,13 +862,14 @@ def test_get_ref_lat_2(ntg1, ntg2, ntg3): np.testing.assert_allclose(ntg2.get_ref_lat_3(), 0.0) -def test_get_transparent_pixel(ntg1, ntg2, ntg3): +def test_get_transparent_pixel(ntg1, ntg2, ntg3, ntg_no_fill_value): """Test getting fill value.""" tp = ntg1.get_transparent_pixel() assert isinstance(tp, int) assert tp == 255 - assert ntg2.get_transparent_pixel() == 0 # when not set ?? 
+    assert ntg2.get_transparent_pixel() == 0
     assert ntg3.get_transparent_pixel() == 255
+    assert ntg_no_fill_value.get_transparent_pixel() == -1


 def test_get_xmax(ntg1, ntg2, ntg3):
@@ -815,3 +904,17 @@ def test_create_unknown_tags(test_image_small_arctic_P):
         PhysicValue="N/A",
         SatelliteNameID=6500014,
         Locatie="Hozomeen")
+
+
+def test_str_ids(test_image_small_arctic_P):
+    """Test that channel and satellite IDs can be str."""
+    from satpy.writers.ninjogeotiff import NinJoTagGenerator
+    NinJoTagGenerator(
+        test_image_small_arctic_P,
+        42,
+        "quorn.tif",
+        ChannelID="la manche",
+        DataType="GPRN",
+        PhysicUnit="N/A",
+        PhysicValue="N/A",
+        SatelliteNameID="trollsat")
diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py index e1d90bd904..1eac33f82a 100644 --- a/satpy/tests/writer_tests/test_ninjotiff.py +++ b/satpy/tests/writer_tests/test_ninjotiff.py
@@ -67,6 +67,19 @@ def test_dataset(self, iwsd):
             uconv.assert_called_once_with(dataset, 'K', 'CELSIUS')
         self.assertEqual(iwsd.call_count, 1)

+    @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset')
+    @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff)
+    def test_dataset_skip_unit_conversion(self, iwsd):
+        """Test saving a dataset without unit conversion."""
+        from satpy.writers.ninjotiff import NinjoTIFFWriter
+        ntw = NinjoTIFFWriter()
+        dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'})
+        with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv:
+            ntw.save_dataset(dataset, physic_unit='CELSIUS',
+                             convert_temperature_units=False)
+            uconv.assert_not_called()
+        self.assertEqual(iwsd.call_count, 1)
+
     @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset')
     @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image')
     @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff)
@@ -109,15 +122,15 @@ def test_convert_units_temp(self):
         sc = make_fake_scene(
             {"IR108": np.arange(25, dtype="f4").reshape(5, 5)},
             common_attrs={"units": "K"})
-        ds_in = sc["IR108"]
+        ds_in_k = sc["IR108"]
         for out_unit in ("C", "CELSIUS"):
-            ds_out = convert_units(ds_in, "K", out_unit)
-            np.testing.assert_array_almost_equal(ds_in + 273.15, ds_out)
-            assert ds_in.attrs != ds_out.attrs
-            assert ds_out.attrs["units"] == out_unit
+            ds_out_c = convert_units(ds_in_k, "K", out_unit)
+            np.testing.assert_array_almost_equal(ds_in_k - 273.15, ds_out_c)
+            assert ds_in_k.attrs != ds_out_c.attrs
+            assert ds_out_c.attrs["units"] == out_unit
         # test that keys aren't lost
-        assert ds_out.attrs.keys() - ds_in.attrs.keys() <= {"units"}
-        assert ds_in.attrs.keys() <= ds_out.attrs.keys()
+        assert ds_out_c.attrs.keys() - ds_in_k.attrs.keys() <= {"units"}
+        assert ds_in_k.attrs.keys() <= ds_out_c.attrs.keys()

     def test_convert_units_other(self):
         """Test that other unit conversions are not implemented."""
diff --git a/satpy/utils.py b/satpy/utils.py index 7c362a672d..95383e6f6a 100644 --- a/satpy/utils.py +++ b/satpy/utils.py
@@ -24,13 +24,16 @@
 import logging
 import os
 import warnings
-from typing import Mapping
+from typing import Mapping, Optional
+from urllib.parse import urlparse

 import numpy as np
 import xarray as xr
 import yaml
 from yaml import BaseLoader

+from satpy import CHUNK_SIZE
+
 try:
     from yaml import UnsafeLoader
 except ImportError:
@@ -40,6 +43,10 @@
 TRACE_LEVEL = 5


+class PerformanceWarning(Warning):
+    """Warning raised when there is a possible performance impact."""
+
+
 def ensure_dir(filename):
     """Check if the dir of f exists, otherwise create it."""
     directory =
os.path.dirname(filename) @@ -240,42 +247,6 @@ def _get_sunz_corr_li_and_shibata(cos_zen): return 24.35 / (2. * cos_zen + np.sqrt(498.5225 * cos_zen**2 + 1)) -def sunzen_corr_cos(data, cos_zen, limit=88., max_sza=95.): - """Perform Sun zenith angle correction. - - The correction is based on the provided cosine of the zenith - angle (``cos_zen``). The correction is limited - to ``limit`` degrees (default: 88.0 degrees). For larger zenith - angles, the correction is the same as at the ``limit`` if ``max_sza`` - is `None`. The default behavior is to gradually reduce the correction - past ``limit`` degrees up to ``max_sza`` where the correction becomes - 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape. - - """ - # Convert the zenith angle limit to cosine of zenith angle - limit_rad = np.deg2rad(limit) - limit_cos = np.cos(limit_rad) - max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza - - # Cosine correction - corr = 1. / cos_zen - if max_sza is not None: - # gradually fall off for larger zenith angle - grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad) - # invert the factor so maximum correction is done at `limit` and falls off later - grad_factor = 1. - np.log(grad_factor + 1) / np.log(2) - # make sure we don't make anything negative - grad_factor = grad_factor.clip(0.) - else: - # Use constant value (the limit) for larger zenith angles - grad_factor = 1. - corr = corr.where(cos_zen > limit_cos, grad_factor / limit_cos) - # Force "night" pixels to 0 (where SZA is invalid) - corr = corr.where(cos_zen.notnull(), 0) - - return data * corr - - def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.): """Perform Sun zenith angle correction. @@ -317,66 +288,88 @@ def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.): return data * corr -def get_satpos(dataset): +def get_satpos( + data_arr: xr.DataArray, + preference: Optional[str] = None, +) -> tuple[float, float, float]: """Get satellite position from dataset attributes. - Preferences are: + Args: + data_arr: DataArray object to access ``.attrs`` metadata + from. + preference: Optional preference for one of the available types of + position information. If not provided or ``None`` then the default + preference is: - * Longitude & Latitude: Nadir, actual, nominal, projection - * Altitude: Actual, nominal, projection + * Longitude & Latitude: nadir, actual, nominal, projection + * Altitude: actual, nominal, projection - A warning is issued when projection values have to be used because nothing else is available. + The provided ``preference`` can be any one of these individual + strings (nadir, actual, nominal, projection). If the + preference is not available then the original preference list is + used. A warning is issued when projection values have to be used because + nothing else is available and it wasn't provided as the ``preference``. 
Returns: Geodetic longitude, latitude, altitude """ + if preference is not None and preference not in ("nadir", "actual", "nominal", "projection"): + raise ValueError(f"Unrecognized satellite coordinate preference: {preference}") + lonlat_prefixes = ("nadir_", "satellite_actual_", "satellite_nominal_", "projection_") + alt_prefixes = _get_prefix_order_by_preference(lonlat_prefixes[1:], preference) + lonlat_prefixes = _get_prefix_order_by_preference(lonlat_prefixes, preference) try: - orb_params = dataset.attrs['orbital_parameters'] - - alt = _get_sat_altitude(orb_params) - - lon, lat = _get_sat_lonlat(orb_params) + lon, lat = _get_sat_lonlat(data_arr, lonlat_prefixes) + alt = _get_sat_altitude(data_arr, alt_prefixes) except KeyError: - # Legacy - lon = dataset.attrs['satellite_longitude'] - lat = dataset.attrs['satellite_latitude'] - alt = dataset.attrs['satellite_altitude'] - + raise KeyError("Unable to determine satellite position. Either the " + "reader doesn't provide that information or " + "geolocation datasets were not available.") return lon, lat, alt -def _get_sat_altitude(orb_params): - # Altitude +def _get_prefix_order_by_preference(prefixes, preference): + preferred_prefixes = [prefix for prefix in prefixes if preference and preference in prefix] + nonpreferred_prefixes = [prefix for prefix in prefixes if not preference or preference not in prefix] + if nonpreferred_prefixes[-1] == "projection_": + # remove projection as a prefix as it is our fallback + nonpreferred_prefixes = nonpreferred_prefixes[:-1] + return preferred_prefixes + nonpreferred_prefixes + + +def _get_sat_altitude(data_arr, key_prefixes): + orb_params = data_arr.attrs["orbital_parameters"] + alt_keys = [prefix + "altitude" for prefix in key_prefixes] try: - alt = orb_params['satellite_actual_altitude'] + alt = _get_first_available_item(orb_params, alt_keys) except KeyError: - try: - alt = orb_params['satellite_nominal_altitude'] - except KeyError: - alt = orb_params['projection_altitude'] - warnings.warn('Actual satellite altitude not available, using projection altitude instead.') + alt = orb_params['projection_altitude'] + warnings.warn('Actual satellite altitude not available, using projection altitude instead.') return alt -def _get_sat_lonlat(orb_params): - # Longitude & Latitude +def _get_sat_lonlat(data_arr, key_prefixes): + orb_params = data_arr.attrs["orbital_parameters"] + lon_keys = [prefix + "longitude" for prefix in key_prefixes] + lat_keys = [prefix + "latitude" for prefix in key_prefixes] try: - lon = orb_params['nadir_longitude'] - lat = orb_params['nadir_latitude'] + lon = _get_first_available_item(orb_params, lon_keys) + lat = _get_first_available_item(orb_params, lat_keys) except KeyError: + lon = orb_params['projection_longitude'] + lat = orb_params['projection_latitude'] + warnings.warn('Actual satellite lon/lat not available, using projection center instead.') + return lon, lat + + +def _get_first_available_item(data_dict, possible_keys): + for possible_key in possible_keys: try: - lon = orb_params['satellite_actual_longitude'] - lat = orb_params['satellite_actual_latitude'] + return data_dict[possible_key] except KeyError: - try: - lon = orb_params['satellite_nominal_longitude'] - lat = orb_params['satellite_nominal_latitude'] - except KeyError: - lon = orb_params['projection_longitude'] - lat = orb_params['projection_latitude'] - warnings.warn('Actual satellite lon/lat not available, using projection centre instead.') - return lon, lat + continue + raise KeyError("None of the possible 
keys found: {}".format(", ".join(possible_keys))) def recursive_dict_update(d, u): @@ -404,10 +397,11 @@ def _check_yaml_configs(configs, key): diagnostic = {} for i in configs: for fname in i: + msg = 'ok' + res = None with open(fname, 'r', encoding='utf-8') as stream: try: res = yaml.load(stream, Loader=UnsafeLoader) - msg = 'ok' except yaml.YAMLError as err: stream.seek(0) res = yaml.load(stream, Loader=BaseLoader) @@ -521,3 +515,110 @@ def ignore_invalid_float_warnings(): with np.errstate(invalid="ignore"), warnings.catch_warnings(): warnings.simplefilter("ignore", RuntimeWarning) yield + + +def get_chunk_size_limit(dtype): + """Compute the chunk size limit in bytes given *dtype*. + + Returns: + If PYTROLL_CHUNK_SIZE is not defined, this function returns None, + otherwise it returns the computed chunk size in bytes. + """ + pixel_size = get_chunk_pixel_size() + if pixel_size is not None: + return pixel_size * np.dtype(dtype).itemsize + return None + + +def get_chunk_pixel_size(): + """Compute the maximum chunk size from CHUNK_SIZE.""" + if CHUNK_SIZE is None: + return None + + if isinstance(CHUNK_SIZE, (tuple, list)): + array_size = np.product(CHUNK_SIZE) + else: + array_size = CHUNK_SIZE ** 2 + return array_size + + +def convert_remote_files_to_fsspec(filenames, storage_options=None): + """Check filenames for transfer protocols, convert to FSFile objects if possible.""" + if storage_options is None: + storage_options = {} + if isinstance(filenames, dict): + return _check_file_protocols_for_dicts(filenames, storage_options) + return _check_file_protocols(filenames, storage_options) + + +def _check_file_protocols_for_dicts(filenames, storage_options): + res = {} + for reader, files in filenames.items(): + opts = storage_options.get(reader, {}) + res[reader] = _check_file_protocols(files, opts) + return res + + +def _check_file_protocols(filenames, storage_options): + local_files, remote_files, fs_files = _sort_files_to_local_remote_and_fsfiles(filenames) + + if remote_files: + return local_files + fs_files + _filenames_to_fsfile(remote_files, storage_options) + + return local_files + fs_files + + +def _sort_files_to_local_remote_and_fsfiles(filenames): + from satpy.readers import FSFile + + local_files = [] + remote_files = [] + fs_files = [] + for f in filenames: + if isinstance(f, FSFile): + fs_files.append(f) + elif urlparse(f).scheme in ('', 'file') or "\\" in f: + local_files.append(f) + else: + remote_files.append(f) + return local_files, remote_files, fs_files + + +def _filenames_to_fsfile(filenames, storage_options): + import fsspec + + from satpy.readers import FSFile + + if filenames: + fsspec_files = fsspec.open_files(filenames, **storage_options) + return [FSFile(f) for f in fsspec_files] + return [] + + +def get_storage_options_from_reader_kwargs(reader_kwargs): + """Read and clean storage options from reader_kwargs.""" + if reader_kwargs is None: + return None, None + storage_options = reader_kwargs.pop('storage_options', None) + storage_opt_dict = _get_storage_dictionary_options(reader_kwargs) + storage_options = _merge_storage_options(storage_options, storage_opt_dict) + + return storage_options, reader_kwargs + + +def _get_storage_dictionary_options(reader_kwargs): + storage_opt_dict = {} + for k, v in reader_kwargs.items(): + if isinstance(v, dict): + storage_opt_dict[k] = v.pop('storage_options', None) + + return storage_opt_dict + + +def _merge_storage_options(storage_options, storage_opt_dict): + if storage_opt_dict: + if storage_options: + 
storage_opt_dict['storage_options'] = storage_options + storage_options = storage_opt_dict + + return storage_options diff --git a/satpy/writers/__init__.py b/satpy/writers/__init__.py index 60e54aa526..d649f3a10e 100644 --- a/satpy/writers/__init__.py +++ b/satpy/writers/__init__.py @@ -23,6 +23,7 @@ import logging import os import warnings +from typing import Optional import dask.array as da import numpy as np @@ -812,7 +813,13 @@ def save_dataset(self, dataset, filename=None, fill_value=None, decorate=decorate, fill_value=fill_value) return self.save_image(img, filename=filename, compute=compute, fill_value=fill_value, **kwargs) - def save_image(self, img, filename=None, compute=True, **kwargs): + def save_image( + self, + img: XRImage, + filename: Optional[str] = None, + compute: bool = True, + **kwargs + ): """Save Image object to a given ``filename``. Args: diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py index 691dbfd9d5..0a9a61f837 100644 --- a/satpy/writers/awips_tiled.py +++ b/satpy/writers/awips_tiled.py @@ -1006,14 +1006,14 @@ def _set_xy_coords_attrs(self, new_ds, crs): if crs.is_geographic: self._fill_units_and_standard_name(y_attrs, 'degrees_north', 'latitude') else: - self._fill_units_and_standard_name(y_attrs, 'meter', 'projection_y_coordinate') + self._fill_units_and_standard_name(y_attrs, 'meters', 'projection_y_coordinate') y_attrs['axis'] = 'Y' x_attrs = new_ds.coords['x'].attrs if crs.is_geographic: self._fill_units_and_standard_name(x_attrs, 'degrees_east', 'longitude') else: - self._fill_units_and_standard_name(x_attrs, 'meter', 'projection_x_coordinate') + self._fill_units_and_standard_name(x_attrs, 'meters', 'projection_x_coordinate') x_attrs['axis'] = 'X' @staticmethod @@ -1021,6 +1021,9 @@ def _fill_units_and_standard_name(attrs, units, standard_name): """Fill in units and standard_name if not set in `attrs`.""" if attrs.get('units') is None: attrs['units'] = units + if attrs['units'] in ('meter', 'metre'): + # AWIPS doesn't like 'meter' + attrs['units'] = 'meters' if attrs.get('standard_name') is None: attrs['standard_name'] = standard_name diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index c469d04af7..6b3584d672 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -656,7 +656,6 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compr new_data.encoding.update(compression) new_data = CFWriter._encode_time(new_data, epoch) - new_data = CFWriter._encode_coords(new_data) if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs: @@ -702,8 +701,13 @@ def _encode_time(new_data, epoch): new_data['time'].encoding['units'] = epoch new_data['time'].attrs['standard_name'] = 'time' new_data['time'].attrs.pop('bounds', None) - if 'time' not in new_data.dims: - new_data = new_data.expand_dims('time') + new_data = CFWriter._add_time_dimension(new_data) + return new_data + + @staticmethod + def _add_time_dimension(new_data): + if 'time' not in new_data.dims and new_data["time"].size not in new_data.shape: + new_data = new_data.expand_dims('time') return new_data @staticmethod diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py index 283d40c452..befc5bdbd3 100644 --- a/satpy/writers/geotiff.py +++ b/satpy/writers/geotiff.py @@ -16,15 +16,20 @@ # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
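For reviewers skimming past the `awips_tiled.py` hunk above: the unit normalization is easy to miss. Restated as a standalone function that mirrors the diff (only the name is shortened here):

```python
def fill_units_and_standard_name(attrs, units, standard_name):
    """Fill in units/standard_name defaults and normalize meters for AWIPS."""
    if attrs.get('units') is None:
        attrs['units'] = units
    if attrs['units'] in ('meter', 'metre'):
        # AWIPS doesn't like 'meter'
        attrs['units'] = 'meters'
    if attrs.get('standard_name') is None:
        attrs['standard_name'] = standard_name
    return attrs


# Defaults are filled in, and pre-set 'meter'/'metre' spellings are fixed up:
assert fill_units_and_standard_name({}, 'meters', 'projection_x_coordinate') == \
    {'units': 'meters', 'standard_name': 'projection_x_coordinate'}
assert fill_units_and_standard_name({'units': 'metre'}, 'meters', 'latitude')['units'] == 'meters'
```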
"""GeoTIFF writer objects for creating GeoTIFF files from `DataArray` objects.""" +from __future__ import annotations import logging +from typing import Any, Optional, Union import numpy as np # make sure we have rasterio even though we don't use it until trollimage # saves the image import rasterio # noqa +from trollimage.colormap import Colormap +from trollimage.xrimage import XRImage +from satpy._compat import DTypeLike from satpy.writers import ImageWriter LOG = logging.getLogger(__name__) @@ -53,6 +58,10 @@ class GeoTIFFWriter(ImageWriter): >>> scn.save_dataset(dataset_name, writer='geotiff', ... tags={'offset': 291.8, 'scale': -0.35}) + Images are tiled by default. To create striped TIFF files ``tiled=False`` can be specified: + + >>> scn.save_datasets(writer='geotiff', tiled=False) + For performance tips on creating geotiffs quickly and making them smaller see the :ref:`faq`. @@ -79,7 +88,25 @@ class GeoTIFFWriter(ImageWriter): "profile", "bigtiff", "pixeltype", - "copy_src_overviews",) + "copy_src_overviews", + # COG driver options (different from GTiff above) + "blocksize", + "resampling", + "quality", + "level", + "overview_resampling", + "warp_resampling", + "overview_compress", + "overview_quality", + "overview_predictor", + "tiling_scheme", + "zoom_level_strategy", + "target_srs", + "res", + "extent", + "aligned_levels", + "add_alpha", + ) def __init__(self, dtype=None, tags=None, **kwargs): """Init the writer.""" @@ -110,11 +137,26 @@ def separate_init_kwargs(cls, kwargs): return init_kwargs, kwargs - def save_image(self, img, filename=None, dtype=None, fill_value=None, - compute=True, keep_palette=False, cmap=None, tags=None, - overviews=None, overviews_minsize=256, - overviews_resampling=None, include_scale_offset=False, - scale_offset_tags=None, **kwargs): + def save_image( + self, + img: XRImage, + filename: Optional[str] = None, + compute: bool = True, + dtype: Optional[DTypeLike] = None, + fill_value: Optional[Union[int, float]] = None, + keep_palette: bool = False, + cmap: Optional[Colormap] = None, + tags: Optional[dict[str, Any]] = None, + overviews: Optional[list[int]] = None, + overviews_minsize: int = 256, + overviews_resampling: Optional[str] = None, + include_scale_offset: bool = False, + scale_offset_tags: Optional[tuple[str, str]] = None, + colormap_tag: Optional[str] = None, + driver: Optional[str] = None, + tiled: bool = True, + **kwargs + ): """Save the image to the given ``filename`` in geotiff_ format. Note for faster output and reduced memory usage the ``rasterio`` @@ -127,14 +169,6 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None, ``filename`` passed during writer creation. Unlike the creation ``filename`` keyword argument, this filename does not get formatted with data attributes. - dtype (numpy.dtype): Numpy data type to save the image as. - Defaults to 8-bit unsigned integer (``np.uint8``) or the data - type of the data to be saved if ``enhance=False``. If the - ``dtype`` argument is provided during writer creation then - that will be used as the default. - fill_value (int or float): Value to use where data values are - NaN/null. If this is specified in the writer configuration - file that value will be used as the default. compute (bool): Compute dask arrays and save the image immediately. If ``False`` then the return value can be passed to :func:`~satpy.writers.compute_writer_results` to do the @@ -143,6 +177,14 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None, them multiple times. 
Defaults to ``True`` in the writer by itself, but is typically passed as ``False`` by callers where calculations can be combined.
+            dtype (DTypeLike): Numpy data type to save the image as.
+                Defaults to 8-bit unsigned integer (``np.uint8``) or the data
+                type of the data to be saved if ``enhance=False``. If the
+                ``dtype`` argument is provided during writer creation then
+                that will be used as the default.
+            fill_value (float or int): Value to use where data values are
+                NaN/null. If this is specified in the writer configuration
+                file that value will be used as the default.
             keep_palette (bool): Save palette/color table to geotiff.
                 To be used with images that were palettized with the
                 "palettize" enhancement. Setting this to ``True`` will cause
@@ -183,6 +225,17 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
                 tag. The value of this argument should be a keyword argument
                 ``(scale_label, offset_label)``, for example, ``("scale",
                 "offset")``, indicating the labels to be used.
+            colormap_tag (Optional[str]): If set and the image being saved was
+                colorized or palettized then a comma-separated version of the
+                colormap is saved to a custom geotiff tag with the provided
+                name. See :meth:`trollimage.colormap.Colormap.to_csv` for more
+                information.
+            driver (Optional[str]): Name of GDAL driver to use to save the
+                geotiff. If not specified or None (default) the "GTiff" driver
+                is used. Another common option is "COG" for Cloud Optimized
+                GeoTIFF. See GDAL documentation for more information.
+            tiled (bool): For performance this defaults to ``True``.
+                Pass ``False`` to create striped TIFF files.
             include_scale_offset (deprecated, bool): Deprecated.
                 Use ``scale_offset_tags=("scale", "offset")`` to include scale
                 and offset tags.
@@ -224,14 +277,17 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
             tags = {}
         tags.update(self.tags)

-        return img.save(filename, fformat='tif', fill_value=fill_value,
+        return img.save(filename, fformat='tif', driver=driver,
+                        fill_value=fill_value,
                         dtype=dtype, compute=compute,
                         keep_palette=keep_palette, cmap=cmap,
                         tags=tags,
                         include_scale_offset_tags=include_scale_offset,
                         scale_offset_tags=scale_offset_tags,
+                        colormap_tag=colormap_tag,
                         overviews=overviews,
                         overviews_resampling=overviews_resampling,
                         overviews_minsize=overviews_minsize,
+                        tiled=tiled,
                         **gdal_options)

     def _get_gdal_options(self, kwargs):
diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index 253f8724d8..f1b663ee93 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py
@@ -72,6 +72,7 @@ and ``max_stretch`` arguments.
 """

+import copy
 import datetime
 import logging

@@ -144,7 +145,11 @@ def save_image(
         SatelliteNameID (int)
             NinJo Satellite ID
         PhysicUnit (str)
-            NinJo label for unit (example: "C")
+            NinJo label for unit (example: "C"). If PhysicValue is set to
+            "Temperature", PhysicUnit is set to "C", but data attributes
+            indicate the data have unit "K", then the writer will adapt the
+            header ``ninjo_AxisIntercept`` such that data are interpreted
+            in units of "C".
PhysicValue (str) NinJo label for quantity (example: "temperature") @@ -174,6 +179,7 @@ def save_image( SatelliteNameID=SatelliteNameID, **ntg_opts) ninjo_tags = {f"ninjo_{k:s}": v for (k, v) in ntg.get_all_tags().items()} + image = self._fix_units(image, PhysicValue, PhysicUnit) return super().save_image( image, @@ -189,6 +195,31 @@ def save_image( scale_offset_tags=None if image.mode.startswith("RGB") else ("ninjo_Gradient", "ninjo_AxisIntercept"), **gdal_opts) + def _fix_units(self, image, quantity, unit): + """Adapt units between °C and K. + + This will return a new XRImage, to make sure the old data and + enhancement history aren't touched. + """ + data_units = image.data.attrs.get("units") + if (quantity.lower() == "temperature" and + unit == "C" and + data_units == "K"): + logger.debug("Adding offset for K → °C conversion") + new_attrs = copy.deepcopy(image.data.attrs) + im2 = type(image)(image.data.copy()) + im2.data.attrs = new_attrs + # this scale/offset has to be applied before anything else + im2.data.attrs["enhancement_history"].insert(0, {"scale": 1, "offset": 273.15}) + return im2 + if unit != data_units and unit.lower() != "n/a": + logger.warning( + f"Writing {unit!s} to ninjogeotiff headers, but " + f"data attributes have unit {data_units!s}. " + "No conversion applied.") + + return image + class NinJoTagGenerator: """Class to collect NinJo tags. @@ -431,7 +462,14 @@ def get_ref_lat_1(self): f"{self.dataset.attrs['area'].description}") def get_transparent_pixel(self): - """Get the transparent pixel value, also known as the fill value.""" + """Get the transparent pixel value, also known as the fill value. + + When no fill value is defined (value `None`), such as for RGBA or + LA images, -1 is returned, in accordance with the file format + specification. + """ + if self.fill_value is None: + return -1 return self.fill_value def get_xmaximum(self): diff --git a/satpy/writers/ninjotiff.py b/satpy/writers/ninjotiff.py index 2736893589..557b2749d0 100644 --- a/satpy/writers/ninjotiff.py +++ b/satpy/writers/ninjotiff.py @@ -116,8 +116,9 @@ def convert_units(dataset, in_unit, out_unit): return dataset if in_unit.lower() in {"k", "kelvin"} and out_unit.lower() in {"c", "celsius"}: + logger.debug("Converting temperature units from K to °C") with xr.set_options(keep_attrs=True): - new_dataset = dataset + 273.15 + new_dataset = dataset - 273.15 new_dataset.attrs["units"] = out_unit return new_dataset @@ -177,12 +178,14 @@ def save_image(self, img, filename=None, compute=True, **kwargs): # floating_po return nt.save(img, filename, data_is_scaled_01=True, compute=compute, **kwargs) def save_dataset( - self, dataset, filename=None, fill_value=None, compute=True, **kwargs + self, dataset, filename=None, fill_value=None, compute=True, + convert_temperature_units=True, **kwargs ): """Save a dataset to ninjotiff format. This calls `save_image` in turn, but first performs some unit conversion - if necessary. + if necessary and desired. Unit conversion can be suppressed by passing + ``convert_temperature_units=False``. """ nunits = kwargs.get("physic_unit", None) if nunits is None: @@ -201,7 +204,10 @@ def save_dataset( "Saving to physical ninjo file without units defined in dataset!"
) else: - dataset = convert_units(dataset, units, nunits) + if convert_temperature_units: + dataset = convert_units(dataset, units, nunits) + else: + logger.debug("Omitting unit conversion") return super(NinjoTIFFWriter, self).save_dataset( dataset, filename=filename, compute=compute, fill_value=fill_value, **kwargs ) diff --git a/setup.py b/setup.py index 46b5a964db..543c5458b3 100644 --- a/setup.py +++ b/setup.py @@ -37,7 +37,8 @@ test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'pylibtiff', 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck', - 'rioxarray', 'pytest', 'pytest-lazy-fixture'] + 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml', + 's3fs'] extras_require = { # Readers: @@ -55,7 +56,7 @@ 'hrit_msg': ['pytroll-schedule'], 'msi_safe': ['rioxarray', "bottleneck", "python-geotiepoints"], 'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'], - 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray'], + 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray', 'defusedxml'], 'abi_l1b': ['h5netcdf'], 'seviri_l1b_hrit': ['pyorbital >= 1.3.1'], 'seviri_l1b_native': ['pyorbital >= 1.3.1'], @@ -63,6 +64,7 @@ 'seviri_l2_bufr': ['eccodes-python'], 'seviri_l2_grib': ['eccodes-python'], 'hsaf_grib': ['pygrib'], + 'remote_reading': ['fsspec'], # Writers: 'cf': ['h5netcdf >= 0.7.3'], 'awips_tiled': ['netCDF4 >= 1.1.8'],
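The changes above add several new save-time keywords: ``tiled``, ``driver``, and ``colormap_tag`` for the geotiff writer, and ``convert_temperature_units`` for the ninjotiff writer. The sketch below shows how they might be exercised together; it is illustrative only (not part of the patch), and the reader name, file paths, and dataset name are hypothetical placeholders.

    # Illustrative usage of the new writer keywords (reader, paths, and
    # dataset name below are placeholders, not prescribed by this patch).
    from satpy import Scene

    scn = Scene(filenames=["/path/to/seviri/files"], reader="seviri_l1b_native")
    scn.load(["IR_108"])

    # Striped (non-tiled) GeoTIFF output via the new ``tiled`` keyword:
    scn.save_datasets(writer="geotiff", tiled=False)

    # Cloud Optimized GeoTIFF via the new ``driver`` keyword; COG-specific
    # creation options such as ``overview_resampling`` are now passed through:
    scn.save_datasets(writer="geotiff", driver="COG", overview_resampling="average")

    # For a palettized image, store the colormap in a custom GeoTIFF tag:
    scn.save_dataset("IR_108", writer="geotiff", keep_palette=True,
                     colormap_tag="colormap")

    # NinJoTIFF output with the K to °C conversion suppressed (other
    # NinJo-specific header keywords omitted for brevity):
    scn.save_dataset("IR_108", writer="ninjotiff", physic_unit="K",
                     convert_temperature_units=False)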