diff --git a/.bandit b/.bandit
new file mode 100644
index 0000000000..d89efffe79
--- /dev/null
+++ b/.bandit
@@ -0,0 +1,3 @@
+[bandit]
+skips: B506
+exclude: satpy/tests
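
The new `.bandit` INI file above skips the B506 (`yaml.load`) check and excludes the test tree. As a rough illustration (not part of the patch), a local scan picking up this config could look like the following, assuming bandit is installed:

```bash
# Hypothetical local run using the .bandit INI file added above; --ini points
# bandit at the same config file the pre-commit hook in this patch uses.
bandit -r satpy/ --ini .bandit
```
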
diff --git a/.git_archival.txt b/.git_archival.txt
index 082d6c2563..95cb3eea4e 100644
--- a/.git_archival.txt
+++ b/.git_archival.txt
@@ -1 +1 @@
-ref-names: $Format:%D$
\ No newline at end of file
+ref-names: $Format:%D$
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 8870ebb201..407bdfb612 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -30,7 +30,7 @@ satpy/etc/readers/avhrr_l1b_gaclac.yaml @mraspaud @sfinkens
satpy/etc/readers/avhrr_l1b_hrpt.yaml @mraspaud
satpy/etc/readers/clavrx.yaml @djhoese
satpy/etc/readers/electrol_hrit.yaml @sfinkens @mraspaud
-satpy/etc/readers/fci_l1c_fdhsi.yaml @mraspaud
+satpy/etc/readers/fci_l1c_nc.yaml @ameraner @gerritholl
satpy/etc/readers/geocat.yaml @djhoese
satpy/etc/readers/goes-imager_hrit.yaml @sfinkens @mraspaud
satpy/etc/readers/goes-imager_nc.yaml @sfinkens @mraspaud
@@ -68,7 +68,7 @@ satpy/readers/clavrx.py @djhoese
satpy/readers/electrol_hrit.py @sfinkens @mraspaud
satpy/readers/eps_l1b.py @mraspaud @pnuu @adybbroe
satpy/readers/eum_base.py @sjoro @sfinkens @adybbroe
-satpy/readers/fci_l1c_fdhsi.py @mraspaud
+satpy/readers/fci_l1c_nc.py @ameraner @gerritholl
satpy/readers/geocat.py @djhoese
satpy/readers/goes_imager_hrit.py @sfinkens @mraspaud
satpy/readers/goes_imager_nc.py @sfinkens @mraspaud
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 0000000000..90e05c40d0
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,11 @@
+# To get started with Dependabot version updates, you'll need to specify which
+# package ecosystems to update and where the package manifests are located.
+# Please see the documentation for all configuration options:
+# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
+
+version: 2
+updates:
+ - package-ecosystem: "github-actions" # See documentation for possible values
+ directory: "/" # Location of package manifests
+ schedule:
+ interval: "weekly"
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index 8f94201670..7087ed154c 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -1,16 +1,24 @@
name: CI
+# https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#concurrency
+# https://docs.github.com/en/developers/webhooks-and-events/events/github-event-types#pullrequestevent
+concurrency:
+ group: ${{ github.workflow }}-${{ github.event.number }}-${{ github.event.type }}
+ cancel-in-progress: true
on: [push, pull_request]
+env:
+ CACHE_NUMBER: 0
+
jobs:
lint:
name: lint and style checks
runs-on: ubuntu-latest
steps:
- name: Checkout source
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- name: Set up Python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v4
with:
python-version: 3.9
- name: Install dependencies
@@ -24,37 +32,6 @@ jobs:
run: |
flake8 satpy/
- website:
- name: build website
- runs-on: ubuntu-latest
- steps:
- - name: Checkout source
- uses: actions/checkout@v2
- with:
- fetch-depth: 0
-
- - name: Setup Conda Environment
- uses: conda-incubator/setup-miniconda@v2
- with:
- miniconda-version: "latest"
- python-version: "3.8"
- mamba-version: "*"
- channels: conda-forge,defaults
- environment-file: continuous_integration/environment.yaml
- activate-environment: test-environment
-
- - name: Install Satpy
- shell: bash -l {0}
- run: |
- pip install sphinx sphinx_rtd_theme sphinxcontrib-apidoc; \
- pip install --no-deps -e .
-
- - name: Run Sphinx Build
- shell: bash -l {0}
- run: |
- cd doc; \
- make html SPHINXOPTS="-W"
-
test:
runs-on: ${{ matrix.os }}
continue-on-error: ${{ matrix.experimental }}
@@ -63,10 +40,10 @@ jobs:
fail-fast: true
matrix:
os: ["windows-latest", "ubuntu-latest", "macos-latest"]
- python-version: ["3.7", "3.8"]
+ python-version: ["3.8", "3.9", "3.10"]
experimental: [false]
include:
- - python-version: "3.8"
+ - python-version: "3.9"
os: "ubuntu-latest"
experimental: true
@@ -78,21 +55,39 @@ jobs:
steps:
- name: Checkout source
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- name: Setup Conda Environment
uses: conda-incubator/setup-miniconda@v2
with:
- miniconda-version: "latest"
+ miniforge-variant: Mambaforge
+ miniforge-version: latest
+ use-mamba: true
python-version: ${{ matrix.python-version }}
- mamba-version: "*"
- channels: conda-forge,defaults
- environment-file: continuous_integration/environment.yaml
activate-environment: test-environment
+ - name: Set cache environment variables
+ shell: bash -l {0}
+ run: |
+ echo "DATE=$(date +'%Y%m%d')" >> $GITHUB_ENV
+ CONDA_PREFIX=$(python -c "import sys; print(sys.prefix)")
+ echo "CONDA_PREFIX=$CONDA_PREFIX" >> $GITHUB_ENV
+
+ - uses: actions/cache@v3
+ with:
+ path: ${{ env.CONDA_PREFIX }}
+ key: ${{ matrix.os }}-${{matrix.python-version}}-conda-${{ hashFiles('continuous_integration/environment.yaml') }}-${{ env.DATE }}-${{matrix.experimental}}-${{ env.CACHE_NUMBER }}
+ id: cache
+
+ - name: Update environment
+ run: mamba env update -n test-environment -f continuous_integration/environment.yaml
+ if: steps.cache.outputs.cache-hit != 'true'
+
- name: Install unstable dependencies
if: matrix.experimental == true
shell: bash -l {0}
+      # We must set LD_PRELOAD to the conda-provided libstdc++ or else the manylinux
+      # wheels may break conda-forge libraries that expect newer glibc/libstdc++ symbols
run: |
python -m pip install \
--index-url https://pypi.anaconda.org/scipy-wheels-nightly/simple/ \
@@ -108,22 +103,26 @@ jobs:
git+https://github.com/dask/distributed \
git+https://github.com/zarr-developers/zarr \
git+https://github.com/Unidata/cftime \
- git+https://github.com/mapbox/rasterio \
+ git+https://github.com/rasterio/rasterio \
git+https://github.com/pydata/bottleneck \
- git+https://github.com/pydata/xarray;
+ git+https://github.com/pydata/xarray \
+ git+https://github.com/astropy/astropy;
+ LD_PRELOAD=$(python -c "import sys; print(sys.prefix)")/lib/libstdc++.so
+ echo "LD_PRELOAD=${LD_PRELOAD}" >> $GITHUB_ENV
- name: Install satpy
shell: bash -l {0}
run: |
- pip install --no-deps -e .
+ python -m pip install --no-deps -e .
- name: Run unit tests
shell: bash -l {0}
run: |
+ export LD_PRELOAD=${{ env.LD_PRELOAD }};
pytest --cov=satpy satpy/tests --cov-report=xml --cov-report=
- name: Upload unittest coverage to Codecov
- uses: codecov/codecov-action@v1
+ uses: codecov/codecov-action@v3
with:
flags: unittests
file: ./coverage.xml
@@ -139,11 +138,12 @@ jobs:
- name: Run behaviour tests
shell: bash -l {0}
run: |
+ export LD_PRELOAD=${{ env.LD_PRELOAD }};
coverage run --source=satpy -m behave satpy/tests/features --tags=-download
coverage xml
- name: Upload behaviour test coverage to Codecov
- uses: codecov/codecov-action@v1
+ uses: codecov/codecov-action@v3
with:
flags: behaviourtests
file: ./coverage.xml
@@ -157,4 +157,3 @@ jobs:
uses: AndreMiras/coveralls-python-action@develop
with:
parallel-finished: true
-
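
The ci.yaml changes above switch the test jobs to Mambaforge, cache the conda environment keyed on the environment file, date, and a manual cache number, and preload the conda-provided libstdc++ whenever unstable (git/nightly) dependencies are installed. A rough local equivalent of that workaround, assuming an activated conda environment with the nightly wheels installed, would be:

```bash
# Sketch of the LD_PRELOAD workaround from the workflow above (assumes an
# activated conda env): preload the env's libstdc++ so manylinux nightly
# wheels do not clash with conda-forge libraries, then run the test suite.
export LD_PRELOAD="$(python -c 'import sys; print(sys.prefix)')/lib/libstdc++.so"
pytest --cov=satpy satpy/tests --cov-report=xml
```
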
diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml
index c85a80b889..7e3afb25c2 100644
--- a/.github/workflows/deploy-sdist.yaml
+++ b/.github/workflows/deploy-sdist.yaml
@@ -11,7 +11,7 @@ jobs:
steps:
- name: Checkout source
- uses: actions/checkout@v2
+ uses: actions/checkout@v3
- name: Create sdist
shell: bash -l {0}
@@ -19,7 +19,7 @@ jobs:
- name: Publish package to PyPI
if: github.event.action == 'published'
- uses: pypa/gh-action-pypi-publish@v1.4.1
+ uses: pypa/gh-action-pypi-publish@v1.5.1
with:
user: __token__
- password: ${{ secrets.pypi_password }}
\ No newline at end of file
+ password: ${{ secrets.pypi_password }}
diff --git a/.gitignore b/.gitignore
index 2eb121b3b9..8990fa1d46 100644
--- a/.gitignore
+++ b/.gitignore
@@ -74,3 +74,4 @@ doc/source/_build/*
# this should be generated automatically when installed
satpy/version.py
doc/source/api/*.rst
+doc/source/reader_table.rst
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 68fd7028ce..3d36ec4301 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,8 +1,40 @@
exclude: '^$'
fail_fast: false
repos:
-- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v2.2.3
+ - repo: https://github.com/PyCQA/flake8
+ rev: 5.0.4
hooks:
- - id: flake8
- additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear]
+ - id: flake8
+ additional_dependencies: [flake8-docstrings, flake8-debugger, flake8-bugbear, mccabe]
+ args: [--max-complexity, "10"]
+ - repo: https://github.com/pre-commit/pre-commit-hooks
+ rev: v4.3.0
+ hooks:
+ - id: trailing-whitespace
+ - id: end-of-file-fixer
+ - id: check-yaml
+ args: [--unsafe]
+ - repo: https://github.com/PyCQA/bandit
+ rev: '1.7.4' # Update me!
+ hooks:
+ - id: bandit
+ args: [--ini, .bandit]
+ - repo: https://github.com/pre-commit/mirrors-mypy
+ rev: 'v0.982' # Use the sha / tag you want to point at
+ hooks:
+ - id: mypy
+ additional_dependencies:
+ - types-docutils
+ - types-pkg-resources
+ - types-PyYAML
+ - types-requests
+ args: ["--python-version", "3.8", "--ignore-missing-imports"]
+ - repo: https://github.com/pycqa/isort
+ rev: 5.10.1
+ hooks:
+ - id: isort
+ language_version: python3
+ci:
+ # To trigger manually, comment on a pull request with "pre-commit.ci autofix"
+ autofix_prs: false
+ skip: [bandit]
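
The expanded pre-commit configuration above adds flake8 with a complexity limit, whitespace/end-of-file/YAML hooks, bandit, mypy, and isort. To exercise the same hooks locally (a sketch, assuming `pip install pre-commit` has been run):

```bash
# Run the hooks defined in .pre-commit-config.yaml locally.
pre-commit install           # run the hooks automatically on every git commit
pre-commit run --all-files   # one-off run across the whole repository
```
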
diff --git a/.readthedocs.yml b/.readthedocs.yml
index 1da3abe673..7a15d5578b 100644
--- a/.readthedocs.yml
+++ b/.readthedocs.yml
@@ -4,9 +4,14 @@ version: 2
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: doc/source/conf.py
+ fail_on_warning: true
# Optionally build your docs in additional formats such as PDF and ePub
formats: all
+build:
+ os: "ubuntu-20.04"
+ tools:
+ python: "mambaforge-4.10"
conda:
environment: doc/rtd_environment.yml
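
With the website CI job removed earlier in this patch, documentation is now built on Read the Docs using mambaforge and `fail_on_warning: true`. A rough local equivalent, based on the commands from the removed CI job, would be:

```bash
# Local docs build mirroring the removed CI job and the strict RTD setting
# (assumes an environment providing Satpy's dependencies, e.g. from
# doc/rtd_environment.yml).
pip install sphinx sphinx_rtd_theme sphinxcontrib-apidoc
pip install --no-deps -e .
cd doc && make html SPHINXOPTS="-W"  # -W treats warnings as errors, like fail_on_warning
```
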
diff --git a/.travis.yml b/.travis.yml
deleted file mode 100644
index f48e5beb6b..0000000000
--- a/.travis.yml
+++ /dev/null
@@ -1,90 +0,0 @@
-language: python
-env:
- global:
- # Set defaults to avoid repeating in most cases
- - PYTHON_VERSION=$TRAVIS_PYTHON_VERSION
- - NUMPY_VERSION=stable
- - MAIN_CMD='python setup.py'
- - CONDA_DEPENDENCIES='xarray dask distributed toolz Cython sphinx cartopy pillow matplotlib scipy pyyaml
- pyproj pyresample coveralls coverage codecov behave netcdf4 h5py h5netcdf gdal rasterio imageio pyhdf
- mock libtiff geoviews zarr python-eccodes geoviews pytest pytest-cov fsspec pylibtiff'
- - PIP_DEPENDENCIES='trollsift trollimage pyspectral pyorbital'
- - SETUP_XVFB=False
- - EVENT_TYPE='push pull_request'
- - SETUP_CMD='test'
- - CONDA_CHANNELS='conda-forge'
- - CONDA_CHANNEL_PRIORITY='strict'
- - MAMBA=True
- - UNSTABLE_DEPS=False
-matrix:
- include:
- - env: PYTHON_VERSION=3.8
- os: linux
- - env: PYTHON_VERSION=3.8
- os: osx
- language: generic
- - env: PYTHON_VERSION=3.8
- os: windows
- language: bash
- - env: PYTHON_VERSION=3.7
- os: linux
- - env: PYTHON_VERSION=3.7
- os: osx
- language: generic
- # allowed to fail:
- - os: linux
- env:
- - PYTHON_VERSION=3.8
- - UNSTABLE_DEPS=True
-
- allow_failures:
- - os: linux
- env:
- - PYTHON_VERSION=3.8
- - UNSTABLE_DEPS=True
-install:
- - git clone --depth 1 git://github.com/astropy/ci-helpers.git
- - source ci-helpers/travis/setup_conda.sh
- # See https://github.com/travis-ci/travis-ci/issues/8920
- - if [ $TRAVIS_OS_NAME != "windows" ]; then
- python -c "import fcntl; fcntl.fcntl(1, fcntl.F_SETFL, 0)";
- fi
- - if [ "$UNSTABLE_DEPS" == "True" ]; then
- python -m pip install
- -f https://7933911d6844c6c53a7d-47bd50c35cd79bd838daf386af554a83.ssl.cf2.rackcdn.com
- --no-deps --pre --upgrade
- matplotlib
- numpy
- pandas
- scipy;
- python -m pip install
- --no-deps --upgrade
- git+https://github.com/dask/dask
- git+https://github.com/dask/distributed
- git+https://github.com/zarr-developers/zarr
- git+https://github.com/Unidata/cftime
- git+https://github.com/mapbox/rasterio
- git+https://github.com/pydata/bottleneck
- git+https://github.com/pydata/xarray;
- fi
- - pip install --no-deps -e .
-script:
-- pytest --cov=satpy satpy/tests
-- coverage run -a --source=satpy -m behave satpy/tests/features --tags=-download
-- if [ "$TRAVIS_EVENT_TYPE" == "cron" ]; then coverage run -a --source=satpy -m behave satpy/tests/features; fi
-#after_success:
-#- if [[ $PYTHON_VERSION == 3.8 ]]; then coveralls; codecov; fi
-#deploy:
-# - provider: pypi
-# user: dhoese
-# password:
-# secure: frK+0k1STeTM7SizRseP0qdTfOVz9ZMIra+3qEytPdxCLceXAH8LxPU16zj5rdNQxasF1hZ6rAd952fly+ypw2TEf5r2WnStrt7G5QlyE7VB6XGSDpIUxKF1FYccLvYs0/R6Y35MTEPqdM51PM5yEBjoY5b4tA3RF3fDq11cqc/SiWr6DgSLB1WJZULOdtCzBbfGbm5LyJ7yeNbISASSAwVvZTGWw7kJDgi0W5zxwEX82N5tBGbfKIu59qmxyj8FxmcrUwKZ4P3rQNg1kN1utzAB+PSf3GAVvbZfWJQuAKwMqpZgaV9lX0V7eUd/AxPobzEk9WyoNBMIdrSPej5BKWTDiYvaeRTOsggoUCSQJJA/SITEvkJgLWXoKKX2OWrM8RBUO4MoZJpPGXN42PRtMJkV2sx6ZigkpJlHdn39SsIRZX31zsfv8bBhclb70bt1Ts0fDd0rVdZAI6gMI+sgUePwEUn+XbWrvI0sMfDX3QsXDMV393RHgaIPxd+lRqUlYsNOxjsWpsbsvX55ePLxYHsNrv11KKyL/iGjGotVeVUO5D78qvfd4JrsUnMalQyZfW8NTEKa5Ebcs7gYJTwYEOTCQU12BkHOv1zFkjZG5RdGwkEvG3pykLhx+qDyYEd7pKB3TvhzLPqZPSrPxirwcoc0UzCc6ocYdzpqVuViFuk=
-# distributions: sdist
-# skip_existing: true
-# on:
-# tags: true
-# repo: pytroll/satpy
-#notifications:
-# slack:
-# rooms:
-# - pytroll:96mNSYSI1dBjGyzVXkBT6qFt#github
diff --git a/AUTHORS.md b/AUTHORS.md
index 50f12afab8..703c88d413 100644
--- a/AUTHORS.md
+++ b/AUTHORS.md
@@ -9,13 +9,16 @@ The following people have made contributions to this project:
- [Trygve Aspenes (TAlonglong)](https://github.com/TAlonglong)
- [Talfan Barnie (TalfanBarnie)](https://github.com/TalfanBarnie)
+- [Jonathan Beavers (jon4than)](https://github.com/jon4than)
- [Suyash Behera (Suyash458)](https://github.com/Suyash458)
- [Ray Bell (raybellwaves)](https://github.com/raybellwaves)
- [Jorge Bravo (jhbravo)](https://github.com/jhbravo)
+- [Sebastian Brodehl (sbrodehl)](https://github.com/sbrodehl)
- [Andrew Brooks (howff)](https://github.com/howff)
- Guido della Bruna - meteoswiss
- [Pierre de Buyl (pdebuyl)](https://github.com/pdebuyl)
- [Eric Bruning (deeplycloudy)](https://github.com/deeplycloudy)
+- [Manuel Carranza (manucarran)](https://github.com/manucarran)
- [Lorenzo Clementi (loreclem)](https://github.com/loreclem)
- [Colin Duff (ColinDuff)](https://github.com/ColinDuff)
- [Radar, Satellite and Nowcasting Division (meteoswiss-mdr)](https://github.com/meteoswiss-mdr)
@@ -23,12 +26,13 @@ The following people have made contributions to this project:
- [Adam Dybbroe (adybbroe)](https://github.com/adybbroe)
- [Ulrik Egede (egede)](https://github.com/egede)
- [Joleen Feltz (joleenf)](https://github.com/joleenf)
-- [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens)
+- [Stephan Finkensieper (sfinkens)](https://github.com/sfinkens) - Deutscher Wetterdienst
- [Andrea Grillini (AppLEaDaY)](https://github.com/AppLEaDaY)
- [Blanka Gvozdikova (gvozdikb)](https://github.com/gvozdikb)
- [Nina Håkansson (ninahakansson)](https://github.com/ninahakansson)
- [Ulrich Hamann](https://github.com/)
-- [Gerrit Holl (gerritholl)](https://github.com/gerritholl)
+- [Mitch Herbertson (mherbertson)](https://github.com/mherbertson)
+- [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst
- [David Hoese (djhoese)](https://github.com/djhoese)
- [Marc Honnorat (honnorat)](https://github.com/honnorat)
- [Mikhail Itkin (mitkin)](https://github.com/mitkin)
@@ -39,7 +43,9 @@ The following people have made contributions to this project:
- [Janne Kotro (jkotro)](https://github.com/jkotro)
- [Ralph Kuehn (ralphk11)](https://github.com/ralphk11)
- [Panu Lahtinen (pnuu)](https://github.com/pnuu)
-- [Thomas Leppelt (m4sth0)](https://github.com/m4sth0)
+- [Jussi Leinonen (jleinonen)](https://github.com/jleinonen) - meteoswiss
+- [Thomas Leppelt (m4sth0)](https://github.com/m4sth0) - Deutscher Wetterdienst
+- [Lu Liu (yukaribbba)](https://github.com/yukaribbba)
- [Andrea Meraner (ameraner)](https://github.com/ameraner)
- [Aronne Merrelli (aronnem)](https://github.com/aronnem)
- [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer)
@@ -48,6 +54,7 @@ The following people have made contributions to this project:
- [Esben S. Nielsen (storpipfugl)](https://github.com/storpipfugl)
- [Tom Parker (tparker-usgs)](https://github.com/tparker-usgs)
- [Christian Peters (peters77)](https://github.com/peters77)
+- [Pepe Phillips (pepephillips)](https://github.com/pepephillips)
- [Ghislain Picard (ghislainp)](https://github.com/ghislainp)
- [Simon R. Proud (simonrp84)](https://github.com/simonrp84)
- [Lars Ørum Rasmussen (loerum)](https://github.com/loerum)
@@ -56,16 +63,19 @@ The following people have made contributions to this project:
- [Pascale Roquet (roquetp)](https://github.com/roquetp)
- [Kristian Rune Larsen](https://github.com/)
- [RutgerK (RutgerK)](https://github.com/RutgerK)
+- [Bengt Rydberg (BengtRydberg)](https://github.com/BengtRydberg)
- Marco Sassi - meteoswiss
- [Stefan Scheiblauer (StefanSnippetCoder)](https://github.com/StefanSnippetCoder)
- [Ronald Scheirer](https://github.com/)
- [Hauke Schulz (observingClouds)](https://github.com/observingClouds)
- [Jakub Seidl (seidlj)](https://github.com/seidlj)
- [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn)
+- [Jean-Luc Shaw (jeanlucshaw)](https://github.com/jeanlucshaw)
- [Dario Stelitano (bornagain1981)](https://github.com/bornagain1981)
- [Johan Strandgren (strandgren)](https://github.com/strandgren)
- [Matias Takala (elfsprite)](https://github.com/elfsprite)
- [Taiga Tsukada (tsukada-cs)](https://github.com/tsukada-cs)
+- [Christian Versloot (christianversloot)](https://github.com/christianversloot)
- [Helga Weber (helgaweb)](https://github.com/helgaweb)
- [hazbottles (hazbottles)](https://github.com/hazbottles)
- [oananicola (oananicola)](https://github.com/oananicola)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2e49f7df7a..9bbfbbd2dd 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,677 @@
+## Version 0.38.0 (2022/11/11)
+
+### Issues Closed
+
+* [Issue 2258](https://github.com/pytroll/satpy/issues/2258) - VIIRS day_microphysics array broadcast error ([PR 2260](https://github.com/pytroll/satpy/pull/2260) by [@djhoese](https://github.com/djhoese))
+* [Issue 2250](https://github.com/pytroll/satpy/issues/2250) - Is reader modis_l1b available?
+* [Issue 2249](https://github.com/pytroll/satpy/issues/2249) - ahi-hsd raise keyerror when load B08 in dataquery
+* [Issue 2241](https://github.com/pytroll/satpy/issues/2241) - Slow loading speed of TROPOMI L2 product
+* [Issue 2234](https://github.com/pytroll/satpy/issues/2234) - Scene `modifiers` keyword argument has no effect ([PR 2235](https://github.com/pytroll/satpy/pull/2235) by [@djhoese](https://github.com/djhoese))
+* [Issue 2233](https://github.com/pytroll/satpy/issues/2233) - 'cached_property' from 'functools' - seems incompatible with python3.7
+* [Issue 2228](https://github.com/pytroll/satpy/issues/2228) - Question: Why the ellipsoid of a geostationary satellite image is not typical WGS84?
+* [Issue 2227](https://github.com/pytroll/satpy/issues/2227) - CF writer output wrong for area with geographic CRS ([PR 2236](https://github.com/pytroll/satpy/pull/2236) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2215](https://github.com/pytroll/satpy/issues/2215) - Abi l2 nc reader can't handle AOD product ([PR 2216](https://github.com/pytroll/satpy/pull/2216) by [@mraspaud](https://github.com/mraspaud))
+* [Issue 2208](https://github.com/pytroll/satpy/issues/2208) - hy2_scat_l2b_h5 reader does not work any more due to space in `valid range` attribute ([PR 2268](https://github.com/pytroll/satpy/pull/2268) by [@TAlonglong](https://github.com/TAlonglong))
+* [Issue 2206](https://github.com/pytroll/satpy/issues/2206) - If you do `python -c "import scipy.sparse"` do you get a similar error?
+* [Issue 2202](https://github.com/pytroll/satpy/issues/2202) - AttributeError: 'AreaDefinition' object has no attribute 'crs'
+* [Issue 2192](https://github.com/pytroll/satpy/issues/2192) - Available Readers Problem
+* [Issue 2189](https://github.com/pytroll/satpy/issues/2189) - Supported readers page does not include many readers ([PR 2191](https://github.com/pytroll/satpy/pull/2191) by [@BENR0](https://github.com/BENR0))
+* [Issue 2183](https://github.com/pytroll/satpy/issues/2183) - Reading bzipped Seviri HRIT segment crashes when reading the data from disk ([PR 2185](https://github.com/pytroll/satpy/pull/2185) by [@mraspaud](https://github.com/mraspaud))
+* [Issue 2170](https://github.com/pytroll/satpy/issues/2170) - satpy_cf_nc Reader Fails to Read Data Written by cf Writer ([PR 2176](https://github.com/pytroll/satpy/pull/2176) by [@mraspaud](https://github.com/mraspaud))
+* [Issue 2154](https://github.com/pytroll/satpy/issues/2154) - module 'ntpath' has no attribute 'sep'"
+* [Issue 2111](https://github.com/pytroll/satpy/issues/2111) - Archived GOES datasets not loading. AttributeError: 'area' object has no attribute 'crs'
+* [Issue 1929](https://github.com/pytroll/satpy/issues/1929) - Two test failures in test_goes_imager_nc.py with Python 3.10
+* [Issue 1672](https://github.com/pytroll/satpy/issues/1672) - Add AreaDefinition support to the 'satpy_cf_nc' reader ([PR 1695](https://github.com/pytroll/satpy/pull/1695) by [@BENR0](https://github.com/BENR0))
+
+In this release 20 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2262](https://github.com/pytroll/satpy/pull/2262) - Fix ratio sharpening not sharing invalid mask between bands ([556](https://github.com/ssec/polar2grid/issues/556))
+* [PR 2260](https://github.com/pytroll/satpy/pull/2260) - Fix VIIRS L1B I-band angle names being inconsistent with VIIRS SDR ([2258](https://github.com/pytroll/satpy/issues/2258))
+* [PR 2257](https://github.com/pytroll/satpy/pull/2257) - Fix failure creating directory if it already exists.
+* [PR 2246](https://github.com/pytroll/satpy/pull/2246) - Fix MODIS reader tests failing with new geotiepoints
+* [PR 2239](https://github.com/pytroll/satpy/pull/2239) - Fix incorrect rows_per_scan in 'acspo' reader ([498](https://github.com/ssec/polar2grid/issues/498))
+* [PR 2236](https://github.com/pytroll/satpy/pull/2236) - CF-compliant storage for lon/lat case ([2227](https://github.com/pytroll/satpy/issues/2227))
+* [PR 2235](https://github.com/pytroll/satpy/pull/2235) - Fix Scene.load modifiers keyword argument having no effect ([2234](https://github.com/pytroll/satpy/issues/2234))
+* [PR 2232](https://github.com/pytroll/satpy/pull/2232) - Make longitude masker & filler single band
+* [PR 2221](https://github.com/pytroll/satpy/pull/2221) - Fix ici after mws merge
+* [PR 2220](https://github.com/pytroll/satpy/pull/2220) - Fix CLAVR-x configuration in 'awips_tiled' writer to be backwards compatible
+* [PR 2216](https://github.com/pytroll/satpy/pull/2216) - Fix coord renaming for AOD product in 'abi_l2_nc' reader ([2215](https://github.com/pytroll/satpy/issues/2215))
+* [PR 2210](https://github.com/pytroll/satpy/pull/2210) - Fix VIIRS EDR Active Fires reader for new format and fix fine/coarse 1D swath handling ([458](https://github.com/ssec/polar2grid/issues/458))
+* [PR 2190](https://github.com/pytroll/satpy/pull/2190) - Fix some enhancements producing dask arrays wrapped in dask arrays
+* [PR 2185](https://github.com/pytroll/satpy/pull/2185) - Refactor HRIT readers to be smarter about compression and reading data ([2183](https://github.com/pytroll/satpy/issues/2183))
+* [PR 2177](https://github.com/pytroll/satpy/pull/2177) - Fix plugins not working with new versions of setuptools
+* [PR 2176](https://github.com/pytroll/satpy/pull/2176) - Fix cf write-read roundtrip ([2170](https://github.com/pytroll/satpy/issues/2170))
+* [PR 2166](https://github.com/pytroll/satpy/pull/2166) - Correct the sun azimuth angle range within satpy.
+
+#### Features added
+
+* [PR 2230](https://github.com/pytroll/satpy/pull/2230) - Add support for compressed FSFiles to HRIT readers
+* [PR 2209](https://github.com/pytroll/satpy/pull/2209) - Update seadas_l2 reader to handle alternative NetCDF file format ([457](https://github.com/ssec/polar2grid/issues/457))
+* [PR 2207](https://github.com/pytroll/satpy/pull/2207) - Add SEVIRI level 2 AMV BUFR
+* [PR 2203](https://github.com/pytroll/satpy/pull/2203) - Fix experimental dependency stdlibc++ issues in CI
+* [PR 2198](https://github.com/pytroll/satpy/pull/2198) - Add warning for SEVIRI native reader in case of bad data
+* [PR 2187](https://github.com/pytroll/satpy/pull/2187) - adding a reader for ATMS level1b data
+* [PR 2185](https://github.com/pytroll/satpy/pull/2185) - Refactor HRIT readers to be smarter about compression and reading data ([2183](https://github.com/pytroll/satpy/issues/2183))
+* [PR 2175](https://github.com/pytroll/satpy/pull/2175) - Add utility function to compute the relative azimuth angle.
+* [PR 2164](https://github.com/pytroll/satpy/pull/2164) - Add low level moisture composite
+* [PR 2125](https://github.com/pytroll/satpy/pull/2125) - Add reader for FY-4B / GHI data
+* [PR 2120](https://github.com/pytroll/satpy/pull/2120) - Add reader for MWS onboard EPS-SG-A
+* [PR 2118](https://github.com/pytroll/satpy/pull/2118) - Add a reader for EPS-SG Ice Cloud Imager
+* [PR 1695](https://github.com/pytroll/satpy/pull/1695) - Add `get_area_def` to cf reader ([1672](https://github.com/pytroll/satpy/issues/1672))
+
+#### Documentation changes
+
+* [PR 2247](https://github.com/pytroll/satpy/pull/2247) - Document behaviour on default enhancement
+* [PR 2225](https://github.com/pytroll/satpy/pull/2225) - Update writer table docs
+* [PR 2200](https://github.com/pytroll/satpy/pull/2200) - Remove mention of fallback to GDAL in geotiff writer
+* [PR 2195](https://github.com/pytroll/satpy/pull/2195) - Add additional logging information about enhancements being used
+* [PR 2191](https://github.com/pytroll/satpy/pull/2191) - Fix automatic reader table not listing readers with missing dependencies ([2189](https://github.com/pytroll/satpy/issues/2189))
+
+#### Clean ups
+
+* [PR 2268](https://github.com/pytroll/satpy/pull/2268) - Cleanup hy2 reader ([2208](https://github.com/pytroll/satpy/issues/2208))
+* [PR 2252](https://github.com/pytroll/satpy/pull/2252) - Create dependabot.yml
+* [PR 2240](https://github.com/pytroll/satpy/pull/2240) - Refactor RGB ratio sharpening again for better performance
+* [PR 2205](https://github.com/pytroll/satpy/pull/2205) - Update URL to rasterio repository in CI
+
+In this release 39 pull requests were closed.
+
+
+## Version 0.37.1 (2022/08/15)
+
+### Issues Closed
+
+* [Issue 2173](https://github.com/pytroll/satpy/issues/2173) - MetopC script fails after update to SatPy 0.37 ([PR 2174](https://github.com/pytroll/satpy/pull/2174) by [@mraspaud](https://github.com/mraspaud))
+
+In this release 1 issue was closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2174](https://github.com/pytroll/satpy/pull/2174) - Fix 3d effect enhancement ([2173](https://github.com/pytroll/satpy/issues/2173))
+
+In this release 1 pull request was closed.
+
+
+## Version 0.37.0 (2022/08/05)
+
+### Issues Closed
+
+* [Issue 2163](https://github.com/pytroll/satpy/issues/2163) - ValueError: time data '2022-07-11T00:30:01Z' does not match format '%Y-%m-%dT%H:%M:%S.%fZ' ([PR 2165](https://github.com/pytroll/satpy/pull/2165) by [@simonrp84](https://github.com/simonrp84))
+* [Issue 2161](https://github.com/pytroll/satpy/issues/2161) - Plotting Scene with Cartopy gives correct borders but red background
+* [Issue 2155](https://github.com/pytroll/satpy/issues/2155) - AVHRR CLASS Filename prefix prevents reading with Scene. ([PR 2157](https://github.com/pytroll/satpy/pull/2157) by [@djhoese](https://github.com/djhoese))
+* [Issue 2145](https://github.com/pytroll/satpy/issues/2145) - Could not calculate destination definition resolution
+* [Issue 2143](https://github.com/pytroll/satpy/issues/2143) - Save geotiff with keep_palette still add an alpha band
+* [Issue 2139](https://github.com/pytroll/satpy/issues/2139) - Hostname neo.sci.gsfc.nasa.gov not reachable anymore
+* [Issue 2135](https://github.com/pytroll/satpy/issues/2135) - Images are slightly different between scn.crop() and original one
+* [Issue 2134](https://github.com/pytroll/satpy/issues/2134) - scene.coarsest_area and scene.resample not working on MSG1/MSG2 in satpy 0.29
+* [Issue 2130](https://github.com/pytroll/satpy/issues/2130) - Can't install satpy with pip
+* [Issue 2127](https://github.com/pytroll/satpy/issues/2127) - Raised RuntimeError when trying to make HIMAWARI-8 true color image ([PR 2128](https://github.com/pytroll/satpy/pull/2128) by [@mherbertson](https://github.com/mherbertson))
+* [Issue 2112](https://github.com/pytroll/satpy/issues/2112) - Trying to run scn.load(['true_color']) on GOES-16 ABI_L1b causes ValueError
+* [Issue 2093](https://github.com/pytroll/satpy/issues/2093) - Multiscene blend does not work well.
+* [Issue 2089](https://github.com/pytroll/satpy/issues/2089) - MultiScene.group doesn't work for differing identifier properties ([PR 2099](https://github.com/pytroll/satpy/pull/2099) by [@sfinkens](https://github.com/sfinkens))
+* [Issue 1996](https://github.com/pytroll/satpy/issues/1996) - FCI dataset attributes are missing orbital parameters ([PR 2110](https://github.com/pytroll/satpy/pull/2110) by [@ameraner](https://github.com/ameraner))
+* [Issue 1949](https://github.com/pytroll/satpy/issues/1949) - keep `delta_time` instead of renaming to `offset_time`
+* [Issue 1865](https://github.com/pytroll/satpy/issues/1865) - navigation available for all L1b readers
+* [Issue 1845](https://github.com/pytroll/satpy/issues/1845) - Add parallax correction ([PR 1904](https://github.com/pytroll/satpy/pull/1904) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 1699](https://github.com/pytroll/satpy/issues/1699) - Remove `compression` argument from CF writer `save_datasets`
+* [Issue 1638](https://github.com/pytroll/satpy/issues/1638) - satpy_cf_nc reader not working with files written from seviri_l1b readers
+* [Issue 1348](https://github.com/pytroll/satpy/issues/1348) - 'AHIHSDFileHandler' object has no attribute 'area'
+* [Issue 1308](https://github.com/pytroll/satpy/issues/1308) - Error with yaml files during tutorial need example file I think
+* [Issue 1015](https://github.com/pytroll/satpy/issues/1015) - Add compute method to Scene ([PR 1017](https://github.com/pytroll/satpy/pull/1017) by [@BENR0](https://github.com/BENR0))
+
+In this release 22 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2165](https://github.com/pytroll/satpy/pull/2165) - Update AGRI reader to deal with invalid `valid_range` HDF attribute ([2163](https://github.com/pytroll/satpy/issues/2163))
+* [PR 2151](https://github.com/pytroll/satpy/pull/2151) - Change default type of AWIPS tiled writer to int16 + _Unsigned
+* [PR 2150](https://github.com/pytroll/satpy/pull/2150) - Fix typos in reader table
+* [PR 2142](https://github.com/pytroll/satpy/pull/2142) - Fix MODIS readers applying add_offset incorrectly
+* [PR 2141](https://github.com/pytroll/satpy/pull/2141) - Fix expected test results for pyorbital 1.7.2
+* [PR 2137](https://github.com/pytroll/satpy/pull/2137) - Fix GPM IMERG reader.
+* [PR 2128](https://github.com/pytroll/satpy/pull/2128) - Fix AHI source file segment order after decompressing. ([2127](https://github.com/pytroll/satpy/issues/2127))
+* [PR 2123](https://github.com/pytroll/satpy/pull/2123) - Fix negative channel 3A slope calibration coefficient in AVHRR reader
+* [PR 2122](https://github.com/pytroll/satpy/pull/2122) - Fix yaml files for some seviri/abi/ahi BlackMarble background composites
+* [PR 2115](https://github.com/pytroll/satpy/pull/2115) - Update 'viirs_edr_active_fires' to work with newer Active Fires output
+* [PR 2114](https://github.com/pytroll/satpy/pull/2114) - Fix ACSPO 'sensor' attribute not being lowercase
+* [PR 2107](https://github.com/pytroll/satpy/pull/2107) - Fix 'ahi_hsd' reader crashing when 'observation_timeline' was invalid
+* [PR 2103](https://github.com/pytroll/satpy/pull/2103) - Update SEVIRI ICARE reader to properly use dask.
+* [PR 2100](https://github.com/pytroll/satpy/pull/2100) - Fix handling of non-existent reflectance bands in 'viirs_l1b' reader
+* [PR 2099](https://github.com/pytroll/satpy/pull/2099) - Fix MultiScene.group in case of differing identifier properties ([2089](https://github.com/pytroll/satpy/issues/2089))
+* [PR 2098](https://github.com/pytroll/satpy/pull/2098) - Fix Scene.coarsest/finest_area not returning consistent results
+* [PR 1877](https://github.com/pytroll/satpy/pull/1877) - Update SEVIRI native reader with 'time_parameters' metadata
+
+#### Features added
+
+* [PR 2160](https://github.com/pytroll/satpy/pull/2160) - Add reader for ESA's Ocean Color CCI data
+* [PR 2157](https://github.com/pytroll/satpy/pull/2157) - Add filename pattern for CLASS subscription files ([2155](https://github.com/pytroll/satpy/issues/2155))
+* [PR 2156](https://github.com/pytroll/satpy/pull/2156) - Added filename pattern for CLASS subscription files.
+* [PR 2147](https://github.com/pytroll/satpy/pull/2147) - added the CRR-Ph files to the nwcsaf geo yaml file
+* [PR 2146](https://github.com/pytroll/satpy/pull/2146) - Update Metimage reader for L2 test data
+* [PR 2140](https://github.com/pytroll/satpy/pull/2140) - Add Natural Color / IR composites
+* [PR 2133](https://github.com/pytroll/satpy/pull/2133) - Rewrite 'apply_enhancement' as individual decorators to allow for easier dask map_blocks usage
+* [PR 2110](https://github.com/pytroll/satpy/pull/2110) - Add orbital_parameters to fci_l1c_nc reader ([1996](https://github.com/pytroll/satpy/issues/1996))
+* [PR 2105](https://github.com/pytroll/satpy/pull/2105) - Enable solar zenith angle caching for the DayNightCompositor
+* [PR 2102](https://github.com/pytroll/satpy/pull/2102) - Add more products to fci_l2_nc reader
+* [PR 2097](https://github.com/pytroll/satpy/pull/2097) - [pre-commit.ci] pre-commit autoupdate
+* [PR 2096](https://github.com/pytroll/satpy/pull/2096) - Convert remote files to FSFile objects automatically
+* [PR 1919](https://github.com/pytroll/satpy/pull/1919) - Implement adaptive FCI chunks padding and create a new GEOVariableSegmentYAMLReader class
+* [PR 1904](https://github.com/pytroll/satpy/pull/1904) - Add parallax correction via new `ParallaxCorrectionModifier` ([1845](https://github.com/pytroll/satpy/issues/1845))
+* [PR 1769](https://github.com/pytroll/satpy/pull/1769) - Add new composite (true_color_with_night_fires) to GOES/ABI: True color (day) with fires (night)
+* [PR 1547](https://github.com/pytroll/satpy/pull/1547) - Add support for fsspec files to seviri_l1b_nc reader
+* [PR 1017](https://github.com/pytroll/satpy/pull/1017) - Add pass through of xr compute, persist and chunk to Scene ([1015](https://github.com/pytroll/satpy/issues/1015))
+
+#### Documentation changes
+
+* [PR 2153](https://github.com/pytroll/satpy/pull/2153) - Document alternative for deprecated get_min/max_area
+* [PR 2138](https://github.com/pytroll/satpy/pull/2138) - Add plugin functionality for readers, writers, and enhancements
+* [PR 2108](https://github.com/pytroll/satpy/pull/2108) - Functions to automatically generate reader table for documentation
+* [PR 2104](https://github.com/pytroll/satpy/pull/2104) - Improvements in custom reader documentation
+* [PR 2091](https://github.com/pytroll/satpy/pull/2091) - Fix link to rad2refl document in SEVIRI base reader
+* [PR 1886](https://github.com/pytroll/satpy/pull/1886) - Update quickstart documentation so that HRV channel is not loaded
+
+In this release 40 pull requests were closed.
+
+
+## Version 0.36.0 (2022/04/14)
+
+### Issues Closed
+
+* [Issue 2082](https://github.com/pytroll/satpy/issues/2082) - Some composite are produced with the wrong colors
+* [Issue 2073](https://github.com/pytroll/satpy/issues/2073) - Creating scene with SEVIRI HRIT reader fails with UnicodeDecodeError ([PR 2077](https://github.com/pytroll/satpy/pull/2077) by [@pdebuyl](https://github.com/pdebuyl))
+* [Issue 2066](https://github.com/pytroll/satpy/issues/2066) - RGBs should never have units, but some do ([PR 2068](https://github.com/pytroll/satpy/pull/2068) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2062](https://github.com/pytroll/satpy/issues/2062) - Can make Trollimage colorbar in scene
+* [Issue 1975](https://github.com/pytroll/satpy/issues/1975) - bunzip regular seviri hrit segments ([PR 2060](https://github.com/pytroll/satpy/pull/2060) by [@pdebuyl](https://github.com/pdebuyl))
+* [Issue 1954](https://github.com/pytroll/satpy/issues/1954) - Cloud Phase/Type/Phase Distinction RGBs for VIIRS and FCI ([PR 1957](https://github.com/pytroll/satpy/pull/1957) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 1702](https://github.com/pytroll/satpy/issues/1702) - Resampling not working with SLSTR ancillary datasets
+
+In this release 7 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2084](https://github.com/pytroll/satpy/pull/2084) - Fix CREFL using incorrect coefficients for MODIS
+* [PR 2083](https://github.com/pytroll/satpy/pull/2083) - Fix VIIRS L1B reader sensor not matching VIIRS SDR reader
+* [PR 2080](https://github.com/pytroll/satpy/pull/2080) - Ignore alpha when adding luminance in Sandwich compositor
+* [PR 2079](https://github.com/pytroll/satpy/pull/2079) - Remove marine_clean_aerosol from default AHI rayleigh_corrected modifier
+* [PR 2077](https://github.com/pytroll/satpy/pull/2077) - Fix missing 'rb' mode for opening files ([2073](https://github.com/pytroll/satpy/issues/2073))
+* [PR 2070](https://github.com/pytroll/satpy/pull/2070) - Fix lru_cache memory leaks and other linting errors
+* [PR 2048](https://github.com/pytroll/satpy/pull/2048) - Fix CMIC CRE product in nwcsaf yaml reader
+* [PR 2016](https://github.com/pytroll/satpy/pull/2016) - Fix the sensor name for msu_gsa_l1b reader
+* [PR 1410](https://github.com/pytroll/satpy/pull/1410) - Fix osisaf SST reader
+
+#### Features added
+
+* [PR 2086](https://github.com/pytroll/satpy/pull/2086) - Update FCI reader for new test data release and add patches for IDPF data
+* [PR 2078](https://github.com/pytroll/satpy/pull/2078) - Add possibility to define the dataset rectification longitude in seviri_l2_bufr reader
+* [PR 2076](https://github.com/pytroll/satpy/pull/2076) - Support reading FSFiles in SEVIRI HRIT reader.
+* [PR 2068](https://github.com/pytroll/satpy/pull/2068) - Make sure RGBs do not have units attributes. ([2066](https://github.com/pytroll/satpy/issues/2066))
+* [PR 2065](https://github.com/pytroll/satpy/pull/2065) - Add filename to YAML for NASA NRT VIIRS files with creation date/time.
+* [PR 2060](https://github.com/pytroll/satpy/pull/2060) - Allow reading Bz2 hrit segments ([1975](https://github.com/pytroll/satpy/issues/1975))
+* [PR 2057](https://github.com/pytroll/satpy/pull/2057) - Add option to replace saturated MODIS L1b values with max valid value
+* [PR 1980](https://github.com/pytroll/satpy/pull/1980) - Adapt AAPP reader for generic chunk size
+* [PR 1957](https://github.com/pytroll/satpy/pull/1957) - Add RGBs for cloud phase (distinction) and type ([1954](https://github.com/pytroll/satpy/issues/1954))
+* [PR 1410](https://github.com/pytroll/satpy/pull/1410) - Fix osisaf SST reader
+
+#### Documentation changes
+
+* [PR 2075](https://github.com/pytroll/satpy/pull/2075) - Add documentation on how the colorize enhancement can be used
+* [PR 2071](https://github.com/pytroll/satpy/pull/2071) - Add example to the documentation using multiple readers
+
+#### Refactoring
+
+* [PR 2087](https://github.com/pytroll/satpy/pull/2087) - Refactor HRIT/LRIT format reader.
+
+In this release 22 pull requests were closed.
+
+
+## Version 0.35.0 (2022/03/16)
+
+### Issues Closed
+
+* [Issue 2063](https://github.com/pytroll/satpy/issues/2063) - Unable to commit changes due to bandit (use of subprocess module)
+* [Issue 2037](https://github.com/pytroll/satpy/issues/2037) - Why the lon/lat is interpolated to 1km while data are still 5km for MOD06 product
+* [Issue 2012](https://github.com/pytroll/satpy/issues/2012) - Define time metadata options and usage ([PR 2031](https://github.com/pytroll/satpy/pull/2031) by [@djhoese](https://github.com/djhoese))
+* [Issue 1973](https://github.com/pytroll/satpy/issues/1973) - Using cached geolocation and angles results in an error if chunk size not appropriately set. ([PR 2041](https://github.com/pytroll/satpy/pull/2041) by [@djhoese](https://github.com/djhoese))
+* [Issue 1842](https://github.com/pytroll/satpy/issues/1842) - Update needed for vii_l1b_nc reader to match a change to the Test Data and processor ([PR 1979](https://github.com/pytroll/satpy/pull/1979) by [@pepephillips](https://github.com/pepephillips))
+* [Issue 1110](https://github.com/pytroll/satpy/issues/1110) - NWCSAF reader does not support GOES or HIMAWARI
+* [Issue 1022](https://github.com/pytroll/satpy/issues/1022) - Factorize area def computation in goes_imager_hrit ([PR 1934](https://github.com/pytroll/satpy/pull/1934) by [@sfinkens](https://github.com/sfinkens))
+* [Issue 956](https://github.com/pytroll/satpy/issues/956) - UnboundLocalError when passing "empty" generator as filenames
+* [Issue 723](https://github.com/pytroll/satpy/issues/723) - Passing multiple readers fails if `generic_image` is among them
+* [Issue 684](https://github.com/pytroll/satpy/issues/684) - Gracefully handle pykdtree's use of OpenMP (OMP_NUM_THREADS) with dask
+
+In this release 10 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2054](https://github.com/pytroll/satpy/pull/2054) - Fix DifferenceCompositor not using metadata from YAML
+* [PR 2049](https://github.com/pytroll/satpy/pull/2049) - Fix dataset attribute typo and reduce amount of categorical dataset filtering in fci_l2_nc reader
+* [PR 2044](https://github.com/pytroll/satpy/pull/2044) - Fix unit handling in ERF DNB normalization's saturation correction
+* [PR 2041](https://github.com/pytroll/satpy/pull/2041) - Fix angle generation caching not working with irregular chunks ([1973](https://github.com/pytroll/satpy/issues/1973))
+* [PR 2032](https://github.com/pytroll/satpy/pull/2032) - Fix various metadata bugs in 'awips_tiled' writer ([417](https://github.com/ssec/polar2grid/issues/417))
+* [PR 1933](https://github.com/pytroll/satpy/pull/1933) - Change tested Python versions to 3.8, 3.9 and 3.10
+
+#### Features added
+
+* [PR 2056](https://github.com/pytroll/satpy/pull/2056) - Update SLSTR calibration coefficients
+* [PR 2055](https://github.com/pytroll/satpy/pull/2055) - Skip dataset flipping in GEOFlippableFileYAMLReader in case of SwathDefinition data
+* [PR 2047](https://github.com/pytroll/satpy/pull/2047) - Add missing GOES-18 support to glm_l2 reader
+* [PR 2034](https://github.com/pytroll/satpy/pull/2034) - Update angle generation to prefer "actual" satellite position
+* [PR 2033](https://github.com/pytroll/satpy/pull/2033) - Remove use of legacy satellite position attributes
+* [PR 2031](https://github.com/pytroll/satpy/pull/2031) - Update AHI HSD reader with observation/scheduled times and nominal satellite position ([2012](https://github.com/pytroll/satpy/issues/2012))
+* [PR 2030](https://github.com/pytroll/satpy/pull/2030) - Add 'preference' option to 'get_satpos' utility
+* [PR 2028](https://github.com/pytroll/satpy/pull/2028) - Add 'colormap_tag' keyword argument to geotiff writer
+* [PR 1993](https://github.com/pytroll/satpy/pull/1993) - Add 'l2_flags' quality filtering to 'seadas_l2' reader
+* [PR 1979](https://github.com/pytroll/satpy/pull/1979) - Update VII reader for test data v2 ([1842](https://github.com/pytroll/satpy/issues/1842))
+* [PR 1933](https://github.com/pytroll/satpy/pull/1933) - Change tested Python versions to 3.8, 3.9 and 3.10
+* [PR 1927](https://github.com/pytroll/satpy/pull/1927) - Add support for more FCI L2 products and datasets
+
+#### Refactoring
+
+* [PR 2040](https://github.com/pytroll/satpy/pull/2040) - Refactor composite generation to avoid unneeded warnings
+* [PR 1934](https://github.com/pytroll/satpy/pull/1934) - Factorize area computation in goes_imager_hrit ([1022](https://github.com/pytroll/satpy/issues/1022))
+
+In this release 20 pull requests were closed.
+
+
+## Version 0.34.0 (2022/02/18)
+
+### Issues Closed
+
+* [Issue 2026](https://github.com/pytroll/satpy/issues/2026) - Missing units in avhrr_l1b_eps reader ([PR 2027](https://github.com/pytroll/satpy/pull/2027) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2024](https://github.com/pytroll/satpy/issues/2024) - Allow to skip unit conversion in ninjotiff writer ([PR 2025](https://github.com/pytroll/satpy/pull/2025) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2023](https://github.com/pytroll/satpy/issues/2023) - Allow to keep units in composite
+* [Issue 2022](https://github.com/pytroll/satpy/issues/2022) - save_dataset changes dataset in-place
+* [Issue 2018](https://github.com/pytroll/satpy/issues/2018) - Wrong AxisIntercept (add_offset) when writing °C temperature units with ninjogeotiff writer
+* [Issue 2014](https://github.com/pytroll/satpy/issues/2014) - Problem in converting VIIRS hdf to geotif
+* [Issue 2010](https://github.com/pytroll/satpy/issues/2010) - AHI HSD true_color incorrect with cache_sensor_angles ([PR 2013](https://github.com/pytroll/satpy/pull/2013) by [@djhoese](https://github.com/djhoese))
+* [Issue 2008](https://github.com/pytroll/satpy/issues/2008) - abi_l1b reader leaks memory in Python-3.7 ([PR 2011](https://github.com/pytroll/satpy/pull/2011) by [@sfinkens](https://github.com/sfinkens))
+* [Issue 2004](https://github.com/pytroll/satpy/issues/2004) - Configure image type returned by MaskingCompositor ([PR 2005](https://github.com/pytroll/satpy/pull/2005) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 2001](https://github.com/pytroll/satpy/issues/2001) - Failed to load AVHRR LAC data
+* [Issue 1999](https://github.com/pytroll/satpy/issues/1999) - Reader for Арктика-М (Arktika-M) МСУ-ГС (MSU-GS) data ([PR 2000](https://github.com/pytroll/satpy/pull/2000) by [@simonrp84](https://github.com/simonrp84))
+* [Issue 1998](https://github.com/pytroll/satpy/issues/1998) - Add reader for Arctica M N-1 hdf5 data
+* [Issue 1995](https://github.com/pytroll/satpy/issues/1995) - AttributeError when cropping data for VIIRS
+* [Issue 1959](https://github.com/pytroll/satpy/issues/1959) - Unittest failure in test_modifiers.py
+* [Issue 1948](https://github.com/pytroll/satpy/issues/1948) - Contribute to Satpy
+* [Issue 1945](https://github.com/pytroll/satpy/issues/1945) - Wrong dtype of `uint32` array saved by the cf_writer
+* [Issue 1943](https://github.com/pytroll/satpy/issues/1943) - sza_check from trollflow2 fails with KeyError: 'start_time'
+* [Issue 1883](https://github.com/pytroll/satpy/issues/1883) - Test failure on i386 and armhf ([PR 1966](https://github.com/pytroll/satpy/pull/1966) by [@djhoese](https://github.com/djhoese))
+* [Issue 1384](https://github.com/pytroll/satpy/issues/1384) - AHI HRIT reader has gotten slower ([PR 1986](https://github.com/pytroll/satpy/pull/1986) by [@pnuu](https://github.com/pnuu))
+* [Issue 1099](https://github.com/pytroll/satpy/issues/1099) - `find_files_and_readers` read unneeded files
+
+In this release 20 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 2027](https://github.com/pytroll/satpy/pull/2027) - Include units with AVHRR EPS metadata ([2026](https://github.com/pytroll/satpy/issues/2026))
+* [PR 2017](https://github.com/pytroll/satpy/pull/2017) - Fix ABI rayleigh_corrected_crefl modifier using deprecated DEM specifier
+* [PR 2015](https://github.com/pytroll/satpy/pull/2015) - Fix various dask array bugs in CREFL modifier
+* [PR 2013](https://github.com/pytroll/satpy/pull/2013) - Fix angle generation caching occasionally swapping results ([2010](https://github.com/pytroll/satpy/issues/2010))
+* [PR 2011](https://github.com/pytroll/satpy/pull/2011) - Fix memory leak in cached_property backport ([2008](https://github.com/pytroll/satpy/issues/2008))
+* [PR 2006](https://github.com/pytroll/satpy/pull/2006) - Fix Scene not being serializable
+* [PR 2002](https://github.com/pytroll/satpy/pull/2002) - Update tests to be more flexible to CRS and enhancement changes
+* [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page
+* [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier
+* [PR 1987](https://github.com/pytroll/satpy/pull/1987) - Check that time is not already a coordinate in CF writer
+* [PR 1983](https://github.com/pytroll/satpy/pull/1983) - More general filename filter for ascat soil moisture, allowing for Metop-B and Metop-C
+* [PR 1982](https://github.com/pytroll/satpy/pull/1982) - Fix erroneous K to C conversion in the ninjotiff writer
+
+#### Features added
+
+* [PR 2025](https://github.com/pytroll/satpy/pull/2025) - Allow skipping unit conversion in NinJoTIFF ([2024](https://github.com/pytroll/satpy/issues/2024))
+* [PR 2007](https://github.com/pytroll/satpy/pull/2007) - Update abi_l2_nc to include filename metadata similar to abi_l1b
+* [PR 2005](https://github.com/pytroll/satpy/pull/2005) - Add flag to MaskingCompositor to return RGBA for single-band input ([2004](https://github.com/pytroll/satpy/issues/2004))
+* [PR 2000](https://github.com/pytroll/satpy/pull/2000) - Add a reader for the MSU-GS/A + Arctica-M1 data ([1999](https://github.com/pytroll/satpy/issues/1999))
+* [PR 1992](https://github.com/pytroll/satpy/pull/1992) - Add support for CMIC product from PPSv2021
+* [PR 1989](https://github.com/pytroll/satpy/pull/1989) - read the "elevation" variable in slstr_l1b
+* [PR 1986](https://github.com/pytroll/satpy/pull/1986) - Add reader kwarg to 'ahi_hrit' to disable exact start_time ([1384](https://github.com/pytroll/satpy/issues/1384))
+* [PR 1967](https://github.com/pytroll/satpy/pull/1967) - Add ability to read comma-separated colormaps during enhancement
+* [PR 1966](https://github.com/pytroll/satpy/pull/1966) - Reduce MODIS L1b/L2 test case size for better test performance ([1883](https://github.com/pytroll/satpy/issues/1883))
+* [PR 1962](https://github.com/pytroll/satpy/pull/1962) - Use a dependency matrix for benchmarking
+
+#### Documentation changes
+
+* [PR 2020](https://github.com/pytroll/satpy/pull/2020) - Clarify documentation regarding attributes used in get_angles
+* [PR 1991](https://github.com/pytroll/satpy/pull/1991) - Update reference to dask distributed setup page
+* [PR 1988](https://github.com/pytroll/satpy/pull/1988) - Update geometry.py docstring from compositor to modifier
+* [PR 1969](https://github.com/pytroll/satpy/pull/1969) - Improve modifier documentation
+* [PR 1968](https://github.com/pytroll/satpy/pull/1968) - Improve API documentation in CompositeBase
+* [PR 1961](https://github.com/pytroll/satpy/pull/1961) - Update documentation to refer to all EO satellite data
+* [PR 1960](https://github.com/pytroll/satpy/pull/1960) - Add release notes and security policy to documentation
+* [PR 1950](https://github.com/pytroll/satpy/pull/1950) - Fix formatting in configuration documentation
+
+In this release 30 pull requests were closed.
+
+
+## Version 0.33.1 (2021/12/17)
+
+### Issues Closed
+
+* [Issue 1937](https://github.com/pytroll/satpy/issues/1937) - Add SECURITY.md
+* [Issue 1932](https://github.com/pytroll/satpy/issues/1932) - warnings of `invalid value encountered in true_divide` and `invalid value encountered in double_scalars` in
+* [Issue 1903](https://github.com/pytroll/satpy/issues/1903) - MPEF Product Header record definition , in seviri_base.py, needs to be updated
+* [Issue 1799](https://github.com/pytroll/satpy/issues/1799) - Deprecate Scene.attrs property
+* [Issue 1192](https://github.com/pytroll/satpy/issues/1192) - Harmonize SEVIRI area definitions
+
+In this release 5 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 1946](https://github.com/pytroll/satpy/pull/1946) - Fix angle generation not working for StackedAreaDefinitions
+* [PR 1942](https://github.com/pytroll/satpy/pull/1942) - Fix dynamic_dnb composite converting NaNs to 0s
+* [PR 1941](https://github.com/pytroll/satpy/pull/1941) - Fix SAFE SAR azimuth noise array construction
+* [PR 1918](https://github.com/pytroll/satpy/pull/1918) - Fix geo interpolation for aapp data
+
+#### Features added
+
+* [PR 1674](https://github.com/pytroll/satpy/pull/1674) - Feature add support for AHI True Color Reproduction
+
+In this release 5 pull requests were closed.
+
+
+## Version 0.33.0 (2021/12/10)
+
+### Issues Closed
+
+* [Issue 1930](https://github.com/pytroll/satpy/issues/1930) - ninjogeotiff writer produces file with ninjo_TransparentPixel=None ([PR 1931](https://github.com/pytroll/satpy/pull/1931) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 1902](https://github.com/pytroll/satpy/issues/1902) - High memory usage generating composites from ABI/AHI
+
+In this release 2 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 1931](https://github.com/pytroll/satpy/pull/1931) - When no fill value is used, write TransparentPixel=-1 in ninjogeotiff headers ([1930](https://github.com/pytroll/satpy/issues/1930))
+* [PR 1926](https://github.com/pytroll/satpy/pull/1926) - Update seadas_l2 chlor_a enhancement to use new log10 stretch
+* [PR 1922](https://github.com/pytroll/satpy/pull/1922) - Fix ABI cloud_phase composite recipe and enhancement
+
+#### Features added
+
+* [PR 1917](https://github.com/pytroll/satpy/pull/1917) - Add support to read and visualize NOAA GOESR L2+ cloud mask products
+* [PR 1912](https://github.com/pytroll/satpy/pull/1912) - Add Frequency range
+* [PR 1908](https://github.com/pytroll/satpy/pull/1908) - Update AHI HSD calibration coefficients
+* [PR 1905](https://github.com/pytroll/satpy/pull/1905) - Updated mpef product header to include new fields
+* [PR 1882](https://github.com/pytroll/satpy/pull/1882) - Update GDAL_OPTIONS with driver= and COG-specific options
+* [PR 1370](https://github.com/pytroll/satpy/pull/1370) - Add support for reading AAPP level-1c MHS/AMSU-B data
+
+#### Refactoring
+
+* [PR 1910](https://github.com/pytroll/satpy/pull/1910) - Refactor SZA and cos(SZA) generation to reduce duplicate computations
+
+In this release 10 pull requests were closed.
+
+
+## Version 0.32.0 (2021/12/01)
+
+### Issues Closed
+
+* [Issue 1900](https://github.com/pytroll/satpy/issues/1900) - Load composites mixed from files or provided data ([PR 1901](https://github.com/pytroll/satpy/pull/1901) by [@djhoese](https://github.com/djhoese))
+* [Issue 1898](https://github.com/pytroll/satpy/issues/1898) - Loading composites without file handlers fails with KeyError ([PR 1899](https://github.com/pytroll/satpy/pull/1899) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 1893](https://github.com/pytroll/satpy/issues/1893) - Download and install Satpy for raspberry pi
+* [Issue 1889](https://github.com/pytroll/satpy/issues/1889) - Question: How to release loaded data from memory?
+* [Issue 1880](https://github.com/pytroll/satpy/issues/1880) - Add area definitions corresponding to geostationary imager fields of regard ([PR 1881](https://github.com/pytroll/satpy/pull/1881) by [@gerritholl](https://github.com/gerritholl))
+* [Issue 1879](https://github.com/pytroll/satpy/issues/1879) - How to use histogram enhancement in yaml files?
+* [Issue 1749](https://github.com/pytroll/satpy/issues/1749) - Load from blended scene ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese))
+* [Issue 1747](https://github.com/pytroll/satpy/issues/1747) - Load composites without file handlers. ([PR 1797](https://github.com/pytroll/satpy/pull/1797) by [@djhoese](https://github.com/djhoese))
+* [Issue 1456](https://github.com/pytroll/satpy/issues/1456) - Default cache directory should respect XDG Base Directory Specification.
+* [Issue 583](https://github.com/pytroll/satpy/issues/583) - PPP_CONFIG_DIR set locally does not include the global dir for the eps_l1b reader
+
+In this release 10 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 1899](https://github.com/pytroll/satpy/pull/1899) - Fix loading multi-sensor composites for manually added data ([1898](https://github.com/pytroll/satpy/issues/1898))
+* [PR 1891](https://github.com/pytroll/satpy/pull/1891) - Fix file handlers improperly matching some file types
+* [PR 1884](https://github.com/pytroll/satpy/pull/1884) - Fix nucaps reader failing when given multiple input files
+
+#### Features added
+
+* [PR 1901](https://github.com/pytroll/satpy/pull/1901) - Update Scene.sensor_names to include sensors from readers and contained data ([1900](https://github.com/pytroll/satpy/issues/1900))
+* [PR 1897](https://github.com/pytroll/satpy/pull/1897) - Update AHI gridded reader to use HTTP instead of FTP
+* [PR 1894](https://github.com/pytroll/satpy/pull/1894) - Add 'seadas_l2' reader for 'chlor_a' product
+* [PR 1892](https://github.com/pytroll/satpy/pull/1892) - Add new pre-commit checks
+* [PR 1888](https://github.com/pytroll/satpy/pull/1888) - Optimize composite YAML loading
+* [PR 1885](https://github.com/pytroll/satpy/pull/1885) - Add optional on-disk zarr caching to sensor angle generation
+* [PR 1881](https://github.com/pytroll/satpy/pull/1881) - Add area definitions for GOES ABI FOR ([1880](https://github.com/pytroll/satpy/issues/1880))
+* [PR 1797](https://github.com/pytroll/satpy/pull/1797) - Allow loading of composites after Scene resampling ([1752](https://github.com/pytroll/satpy/issues/1752), [1749](https://github.com/pytroll/satpy/issues/1749), [1747](https://github.com/pytroll/satpy/issues/1747))
+
+#### Documentation changes
+
+* [PR 1873](https://github.com/pytroll/satpy/pull/1873) - Fix a typo in the ninjogeotiff documentation
+
+In this release 12 pull requests were closed.
+
+
+## Version 0.31.0 (2021/11/03)
+
+### Issues Closed
+
+* [Issue 1866](https://github.com/pytroll/satpy/issues/1866) - Data Type of AHI NetCDF Output
+* [Issue 1859](https://github.com/pytroll/satpy/issues/1859) - Yaml UnsafeLoader ImportError on colab.google ([PR 1860](https://github.com/pytroll/satpy/pull/1860) by [@abrammer](https://github.com/abrammer))
+* [Issue 1853](https://github.com/pytroll/satpy/issues/1853) - … ([PR 1864](https://github.com/pytroll/satpy/pull/1864) by [@djhoese](https://github.com/djhoese))
+
+In this release 12 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 1868](https://github.com/pytroll/satpy/pull/1868) - Fix MiRS reader not working with new versions of dask
+* [PR 1860](https://github.com/pytroll/satpy/pull/1860) - Catch ImportError on UnsafeLoader in composites/config_loader ([1859](https://github.com/pytroll/satpy/issues/1859))
+* [PR 1855](https://github.com/pytroll/satpy/pull/1855) - Fix 'acspo' reader producing non-y/x dimension names
+* [PR 1854](https://github.com/pytroll/satpy/pull/1854) - Fix 'awips_tiled' writer doing unnecessary attribute formatting
+* [PR 1849](https://github.com/pytroll/satpy/pull/1849) - Update AGRI reader to ensure that angles can be correctly loaded.
+
+#### Features added
+
+* [PR 1850](https://github.com/pytroll/satpy/pull/1850) - Adapt msi-safe to the new product format ([1847](https://github.com/pytroll/satpy/issues/1847))
+* [PR 1839](https://github.com/pytroll/satpy/pull/1839) - Add ninjogeotiff writer to write GeoTIFFs including necessary NinJo tags in GDALMetadata ([1838](https://github.com/pytroll/satpy/issues/1838))
+* [PR 1743](https://github.com/pytroll/satpy/pull/1743) - Add option to configure group_files behaviour on empty groups in case of multiple readers ([1742](https://github.com/pytroll/satpy/issues/1742))
+
+#### Documentation changes
+
+* [PR 1867](https://github.com/pytroll/satpy/pull/1867) - Update PDF metadata for sphinx documentation
+* [PR 1864](https://github.com/pytroll/satpy/pull/1864) - Update Scene.save_datasets to clarify what will be saved ([1138](https://github.com/pytroll/satpy/issues/1138))
+* [PR 1862](https://github.com/pytroll/satpy/pull/1862) - Correct phrasing of upside-down
+* [PR 1852](https://github.com/pytroll/satpy/pull/1852) - Fix reference to dask distributed setup page
+
+In this release 12 pull requests were closed.
+
+
+## Version 0.30.1 (2021/09/28)
+
+### Issues Closed
+
+* [Issue 1835](https://github.com/pytroll/satpy/issues/1835) - scipy module error?
+* [Issue 1832](https://github.com/pytroll/satpy/issues/1832) - variable from python to composite
+* [Issue 1831](https://github.com/pytroll/satpy/issues/1831) - example yml files for other readers
+* [Issue 1829](https://github.com/pytroll/satpy/issues/1829) - pytest satpy/tests does not work ([PR 1830](https://github.com/pytroll/satpy/pull/1830) by [@djhoese](https://github.com/djhoese))
+* [Issue 1828](https://github.com/pytroll/satpy/issues/1828) - Error occurred plotting Himawari-8
+* [Issue 1484](https://github.com/pytroll/satpy/issues/1484) - Broken links to new EUMETSAT website ([PR 1827](https://github.com/pytroll/satpy/pull/1827) by [@pdebuyl](https://github.com/pdebuyl))
+
+In this release 6 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR1837](https://github.com/pytroll/satpy/pull/1837) - Fix config path handling on Windows
+* [PR1827](https://github.com/pytroll/satpy/pull/1827) - Fix eumetsat urls in satpy/readers ([1484](https://github.com/pytroll/satpy/issues/1484))
+
+#### Documentation changes
+
+* [PR1837](https://github.com/pytroll/satpy/pull/1837) - Fix config path handling on Windows
+* [PR1830](https://github.com/pytroll/satpy/pull/1830) - Move tests_require to special "tests" extra for easier installation ([1829](https://github.com/pytroll/satpy/issues/1829))
+* [PR1827](https://github.com/pytroll/satpy/pull/1827) - Fix eumetsat urls in satpy/readers ([1484](https://github.com/pytroll/satpy/issues/1484))
+
+In this release 5 pull requests were closed.
+
+
+## Version 0.30.0 (2021/09/17)
+
+### Issues Closed
+
+* [Issue 1821](https://github.com/pytroll/satpy/issues/1821) - Resampling to `true_color_with_night_ir_hires` no longer works. ([PR 1823](https://github.com/pytroll/satpy/pull/1823))
+* [Issue 1803](https://github.com/pytroll/satpy/issues/1803) - how to xRITDecompress files for using satpy
+* [Issue 1796](https://github.com/pytroll/satpy/issues/1796) - Extend use of bz2 compression for input files for seviri_l1b_hrit ([PR 1798](https://github.com/pytroll/satpy/pull/1798))
+* [Issue 1794](https://github.com/pytroll/satpy/issues/1794) - ir_overview vs cloudtop
+* [Issue 1793](https://github.com/pytroll/satpy/issues/1793) - Different `y_bounds` and `x_bounds` shapes of TROPOMI MultiScene
+* [Issue 1791](https://github.com/pytroll/satpy/issues/1791) - Memory usage has increased drastically
+* [Issue 1786](https://github.com/pytroll/satpy/issues/1786) - The `viirs_sdr` reader does not function correctly with `GMODO` geolocation. ([PR 1787](https://github.com/pytroll/satpy/pull/1787))
+* [Issue 1783](https://github.com/pytroll/satpy/issues/1783) - Metadata name problem in HY-2B L2B reader ([PR 1785](https://github.com/pytroll/satpy/pull/1785))
+* [Issue 1780](https://github.com/pytroll/satpy/issues/1780) - What shoud I do if I only want to keep the day part of DayNightCompositor? ([PR 1816](https://github.com/pytroll/satpy/pull/1816))
+* [Issue 1779](https://github.com/pytroll/satpy/issues/1779) - piecewise_linear_stretch didn't work properly on GK-2A AMI data
+* [Issue 1773](https://github.com/pytroll/satpy/issues/1773) - [Question] Geolocation information of FengYun4A (FY-4A) AGRI L1B data ([PR 1782](https://github.com/pytroll/satpy/pull/1782))
+* [Issue 1759](https://github.com/pytroll/satpy/issues/1759) - Ask For Help: How to operate SunZenithCorrector manually?
+* [Issue 1750](https://github.com/pytroll/satpy/issues/1750) - MultiScene.blend does not document the interface for the blend function ([PR 1751](https://github.com/pytroll/satpy/pull/1751))
+* [Issue 1745](https://github.com/pytroll/satpy/issues/1745) - Resampling MODIS Level 1B data
+* [Issue 1738](https://github.com/pytroll/satpy/issues/1738) - available_dataset_names omits composites depending on more than one reader
+* [Issue 1730](https://github.com/pytroll/satpy/issues/1730) - geotiff writer ignores dtype argument, always writes float if enhance=False ([PR 1733](https://github.com/pytroll/satpy/pull/1733))
+* [Issue 1728](https://github.com/pytroll/satpy/issues/1728) - Unable to read HY-2B SCA L2B file
+* [Issue 1727](https://github.com/pytroll/satpy/issues/1727) - 'NoData' area is not black(clean) in the Sentinel-2 MSI output ([PR 1628](https://github.com/pytroll/satpy/pull/1628))
+* [Issue 1722](https://github.com/pytroll/satpy/issues/1722) - 'ModuleNotFoundError' when processing Sentinel-2 MSI data ([PR 1723](https://github.com/pytroll/satpy/pull/1723))
+* [Issue 1718](https://github.com/pytroll/satpy/issues/1718) - Raw metadata handling impacts performance ([PR 1795](https://github.com/pytroll/satpy/pull/1795))
+* [Issue 1661](https://github.com/pytroll/satpy/issues/1661) - Support for clavrx netcdf files ([PR 1716](https://github.com/pytroll/satpy/pull/1716))
+* [Issue 1625](https://github.com/pytroll/satpy/issues/1625) - Part of Sentinel-2 images missing when atmospheric corrected ([PR 1628](https://github.com/pytroll/satpy/pull/1628))
+* [Issue 1584](https://github.com/pytroll/satpy/issues/1584) - to_xarray_dataset on empty scene fails with TypeError ([PR 1698](https://github.com/pytroll/satpy/pull/1698))
+
+In this release 23 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 1823](https://github.com/pytroll/satpy/pull/1823) - Fix unify_chunks usage in compositors and fix image mode in BackgroundCompositor ([1821](https://github.com/pytroll/satpy/issues/1821))
+* [PR 1814](https://github.com/pytroll/satpy/pull/1814) - Add missing metadata to MODIS L1b and L2 readers
+* [PR 1813](https://github.com/pytroll/satpy/pull/1813) - Fix composites failing when inputs are different chunk sizes
+* [PR 1808](https://github.com/pytroll/satpy/pull/1808) - Fix ReflectanceCorrector (crefl) for MODIS data
+* [PR 1804](https://github.com/pytroll/satpy/pull/1804) - Fix consistency with nucaps sensor metadata (set/lowercase)
+* [PR 1802](https://github.com/pytroll/satpy/pull/1802) - Add warning in 'awips_tiled' writer when 'units' are too long
+* [PR 1800](https://github.com/pytroll/satpy/pull/1800) - Fix for missing attributes when requesting 'counts' calibration from ABI L1B reader.
+* [PR 1792](https://github.com/pytroll/satpy/pull/1792) - Maintain categorical clavrx data as integer arrays
+* [PR 1787](https://github.com/pytroll/satpy/pull/1787) - Fix 'viirs_sdr' repeating data when TC geolocation was not available ([1786](https://github.com/pytroll/satpy/issues/1786))
+* [PR 1784](https://github.com/pytroll/satpy/pull/1784) - Fix ABI readers not assigning 'platform_name' for GOES-18/19
+* [PR 1782](https://github.com/pytroll/satpy/pull/1782) - Update AGRI/L1 geolocation ([1773](https://github.com/pytroll/satpy/issues/1773))
+* [PR 1777](https://github.com/pytroll/satpy/pull/1777) - Fix mviri l1b fiduceo reader compatibility with newer xarray
+* [PR 1776](https://github.com/pytroll/satpy/pull/1776) - Fix 'awips_tiled' writer producing an invalid y coordinate
+* [PR 1774](https://github.com/pytroll/satpy/pull/1774) - Fix the seviri benchmarks
+* [PR 1771](https://github.com/pytroll/satpy/pull/1771) - Fix VIIRS SDR reader not handling multi-granule files with fewer scans
+* [PR 1770](https://github.com/pytroll/satpy/pull/1770) - Fix CLAVR-x reader and 'awips_tiled' writer to produce AWIPS-compatible output
+* [PR 1744](https://github.com/pytroll/satpy/pull/1744) - Fix VIRR reader handling valid_range when it is a numpy array
+* [PR 1734](https://github.com/pytroll/satpy/pull/1734) - Remove valid_range from attributes in VIRR L1b reader
+* [PR 1733](https://github.com/pytroll/satpy/pull/1733) - Fix geotiff writer ignoring dtype argument ([1730](https://github.com/pytroll/satpy/issues/1730))
+* [PR 1724](https://github.com/pytroll/satpy/pull/1724) - Replace doc references to PPP_CONFIG_DIR ([1724](https://github.com/pytroll/satpy/issues/1724))
+* [PR 1723](https://github.com/pytroll/satpy/pull/1723) - Fix package dependencies for the `msi_safe` reader ([1722](https://github.com/pytroll/satpy/issues/1722))
+* [PR 1698](https://github.com/pytroll/satpy/pull/1698) - Fix error when calling to_xarray_dataset on an empty scene ([1584](https://github.com/pytroll/satpy/issues/1584))
+* [PR 1628](https://github.com/pytroll/satpy/pull/1628) - Fix for transposed angles in safe-msi reader ([1727](https://github.com/pytroll/satpy/issues/1727), [1625](https://github.com/pytroll/satpy/issues/1625))
+
+#### Features added
+
+* [PR 1824](https://github.com/pytroll/satpy/pull/1824) - Add additional ACSPO reader file patterns
+* [PR 1817](https://github.com/pytroll/satpy/pull/1817) - Fix ninjotiff writer for mode P
+* [PR 1816](https://github.com/pytroll/satpy/pull/1816) - Add 'day_night' flag to DayNightCompositor for day-only or night-only results ([1780](https://github.com/pytroll/satpy/issues/1780))
+* [PR 1815](https://github.com/pytroll/satpy/pull/1815) - Add MODIS L2 products produced by IMAPP
+* [PR 1805](https://github.com/pytroll/satpy/pull/1805) - Add 'reader' name to all produced DataArrays
+* [PR 1801](https://github.com/pytroll/satpy/pull/1801) - added link to the GOES-2-go package in the docs as a download source.
+* [PR 1798](https://github.com/pytroll/satpy/pull/1798) - Add on-the-fly bz2 decompression for HRIT MSG PRO and EPI files ([1796](https://github.com/pytroll/satpy/issues/1796))
+* [PR 1790](https://github.com/pytroll/satpy/pull/1790) - Add ABI L1B benchmarks
+* [PR 1785](https://github.com/pytroll/satpy/pull/1785) - Feature handle data from HY-2B SCAT files directly from NSOAS ([1783](https://github.com/pytroll/satpy/issues/1783))
+* [PR 1772](https://github.com/pytroll/satpy/pull/1772) - Add access point to global_attrs to netCDF4FileHandler
+* [PR 1760](https://github.com/pytroll/satpy/pull/1760) - Add benchmarks for seviri hrit
+* [PR 1720](https://github.com/pytroll/satpy/pull/1720) - Add a test to ensure seviri hrv has priority over vis008 when requesting 0.8µm
+* [PR 1717](https://github.com/pytroll/satpy/pull/1717) - Add low resolution file patterns for AHI HSD reader
+* [PR 1716](https://github.com/pytroll/satpy/pull/1716) - Update Clavrx reader for netcdf files ([1661](https://github.com/pytroll/satpy/issues/1661))
+* [PR 1692](https://github.com/pytroll/satpy/pull/1692) - Add raw 'counts' calibration to 'abi_l1b' reader
+* [PR 1297](https://github.com/pytroll/satpy/pull/1297) - Add support for MCMIP GOES ABI L2 files ([1162](https://github.com/pytroll/satpy/issues/1162))
+
+#### Documentation changes
+
+* [PR 1819](https://github.com/pytroll/satpy/pull/1819) - Fix invalid YAML syntax in enhancement documentation
+* [PR 1801](https://github.com/pytroll/satpy/pull/1801) - added link to the GOES-2-go package in the docs as a download source.
+* [PR 1765](https://github.com/pytroll/satpy/pull/1765) - Add missing demo data directory entry to config documentation
+* [PR 1751](https://github.com/pytroll/satpy/pull/1751) - Improve documentation for MultiScene.blend ([1750](https://github.com/pytroll/satpy/issues/1750))
+* [PR 1726](https://github.com/pytroll/satpy/pull/1726) - Point out get_area_def in resample documentation ([1726](https://github.com/pytroll/satpy/issues/1726))
+* [PR 1724](https://github.com/pytroll/satpy/pull/1724) - Replace doc references to PPP_CONFIG_DIR ([1724](https://github.com/pytroll/satpy/issues/1724))
+
+In this release 45 pull requests were closed.
+
+
+## Version 0.29.0 (2021/06/04)
+
+### Issues Closed
+
+* [Issue 1714](https://github.com/pytroll/satpy/issues/1714) - Plotting day night composite satellite image
+* [Issue 1689](https://github.com/pytroll/satpy/issues/1689) - BackgroundCompositor using IR Sandwich (masked so only coldest clouds are visible) and True Color as inputs ([PR 1690](https://github.com/pytroll/satpy/pull/1690))
+* [Issue 1684](https://github.com/pytroll/satpy/issues/1684) - Rename fci_l1c_fdhsi to fci_l1c_nc ([PR 1712](https://github.com/pytroll/satpy/pull/1712))
+* [Issue 1293](https://github.com/pytroll/satpy/issues/1293) - DOC: broken link for geoview ([PR 1697](https://github.com/pytroll/satpy/pull/1697))
+* [Issue 1120](https://github.com/pytroll/satpy/issues/1120) - Broken-off sentence in `cf_writer` module documentation: "If a non-dimensional coordinate is identical for" ([PR 1697](https://github.com/pytroll/satpy/pull/1697))
+* [Issue 1104](https://github.com/pytroll/satpy/issues/1104) - NUCAPS reader uses incorrect _FillValue ([PR 1710](https://github.com/pytroll/satpy/pull/1710))
+* [Issue 1097](https://github.com/pytroll/satpy/issues/1097) - Deprecate satpy.readers.utils.get_area_slices
+* [Issue 1085](https://github.com/pytroll/satpy/issues/1085) - Add tonemapping modifiers for truecolor images
+* [Issue 1060](https://github.com/pytroll/satpy/issues/1060) - Reorder installation instructions to put conda before PyPI ([PR 1711](https://github.com/pytroll/satpy/pull/1711))
+* [Issue 1028](https://github.com/pytroll/satpy/issues/1028) - Mitiff tests failing on python 3.7 travis environments
+* [Issue 990](https://github.com/pytroll/satpy/issues/990) - Documentation on storing area definitions has a broken fragment identifier link to pyresample ([PR 1697](https://github.com/pytroll/satpy/pull/1697))
+* [Issue 973](https://github.com/pytroll/satpy/issues/973) - For VIIRS composite there are two composites with the same name.
+* [Issue 936](https://github.com/pytroll/satpy/issues/936) - Swap names for Vis/IR default natural_color and natural_color_sun composites
+* [Issue 722](https://github.com/pytroll/satpy/issues/722) - Standardise self.mda for SEVIRI attributes
+* [Issue 608](https://github.com/pytroll/satpy/issues/608) - Update to fix deprecation warning from dask regarding atop
+* [Issue 566](https://github.com/pytroll/satpy/issues/566) - Add AbstractScene class
+* [Issue 500](https://github.com/pytroll/satpy/issues/500) - Add ability to add proper references to published algorithms
+* [Issue 495](https://github.com/pytroll/satpy/issues/495) - Update tests to skip tests if dependencies are missing
+* [Issue 425](https://github.com/pytroll/satpy/issues/425) - Add DART compatible observation writer
+* [Issue 346](https://github.com/pytroll/satpy/issues/346) - lat-lon as the default dimensions
+* [Issue 334](https://github.com/pytroll/satpy/issues/334) - Add 'Performance Tips' section to documentation
+* [Issue 164](https://github.com/pytroll/satpy/issues/164) - Should enhancers know the data type beforehand
+* [Issue 102](https://github.com/pytroll/satpy/issues/102) - Fix meteosat 10 area
+* [Issue 100](https://github.com/pytroll/satpy/issues/100) - Add background color option to simple image writer
+* [Issue 99](https://github.com/pytroll/satpy/issues/99) - Adding coastlines does not preserve transparency
+* [Issue 92](https://github.com/pytroll/satpy/issues/92) - Merge area definition files
+* [Issue 9](https://github.com/pytroll/satpy/issues/9) - Convert mpop readers to satpy yaml readers
+
+In this release 27 issues were closed.
+
+### Pull Requests Merged
+
+#### Bugs fixed
+
+* [PR 1710](https://github.com/pytroll/satpy/pull/1710) - Fix NUCAPS reader having incorrect _FillValue attribute ([1104](https://github.com/pytroll/satpy/issues/1104))
+* [PR 1706](https://github.com/pytroll/satpy/pull/1706) - Update SLSTR reader to choose correct file for interpolated angles
+* [PR 1691](https://github.com/pytroll/satpy/pull/1691) - Fix reference to sector_id global key in 'awips_tiled' writer YAML
+* [PR 1690](https://github.com/pytroll/satpy/pull/1690) - Fix SandwichCompositor modifying input data ([1689](https://github.com/pytroll/satpy/issues/1689))
+* [PR 1679](https://github.com/pytroll/satpy/pull/1679) - Remove extra attributes tag and fix indentation
+
+#### Features added
+
+* [PR 1715](https://github.com/pytroll/satpy/pull/1715) - Fix benchmarks to run with older commits
+* [PR 1701](https://github.com/pytroll/satpy/pull/1701) - Add pending deprecation reader names check
+* [PR 1680](https://github.com/pytroll/satpy/pull/1680) - Implement reading of index map and auxiliary data in FCI L1c reader
+
+#### Documentation changes
+
+* [PR 1711](https://github.com/pytroll/satpy/pull/1711) - Rewrite installation instructions to make conda use clearer ([1060](https://github.com/pytroll/satpy/issues/1060))
+* [PR 1697](https://github.com/pytroll/satpy/pull/1697) - Solve various documentation issues ([990](https://github.com/pytroll/satpy/issues/990), [1293](https://github.com/pytroll/satpy/issues/1293), [1120](https://github.com/pytroll/satpy/issues/1120))
+
+In this release 10 pull requests were closed.
+
+
## Version 0.28.1 (2021/05/18)
### Issues Closed
diff --git a/MANIFEST.in b/MANIFEST.in
index 948dcc64b9..7c8ea0e146 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -3,4 +3,4 @@ include doc/source/*
include doc/examples/*.py
include LICENSE.txt
include README.rst
-include satpy/version.py
\ No newline at end of file
+include satpy/version.py
diff --git a/README.rst b/README.rst
index c3c924af93..d971a872f5 100644
--- a/README.rst
+++ b/README.rst
@@ -9,7 +9,7 @@ Satpy
.. image:: https://badge.fury.io/py/satpy.svg
:target: https://badge.fury.io/py/satpy
-
+
.. image:: https://anaconda.org/conda-forge/satpy/badges/version.svg
:target: https://anaconda.org/conda-forge/satpy/
diff --git a/SECURITY.md b/SECURITY.md
new file mode 100644
index 0000000000..a743afa44c
--- /dev/null
+++ b/SECURITY.md
@@ -0,0 +1,24 @@
+# Security Policy
+
+## Supported Versions
+
+Satpy is currently pre-1.0 and every release includes many changes. As such, we can't
+guarantee that releases before 1.0 will see security updates except for the most recent
+release. After 1.0, you can expect more stability in the interfaces and security fixes to be
+backported more regularly.
+
+| Version | Supported |
+| ------- | ------------------ |
+| 0.x.x (latest) | :white_check_mark: |
+| < 0.33.0 | :x: |
+
+## Unsafe YAML Loading
+
+Satpy allows for unsafe loading of YAML configuration files. Any YAML files
+from untrusted sources should be sanitized of potentially malicious code
+before use, for example as sketched below.
+
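+The snippet below is an illustrative sketch (not a Satpy API): PyYAML's safe
+loader rejects the `!!python/...` object tags that unsafe loading would
+otherwise execute, so it can be used to pre-check an untrusted file before
+handing it to Satpy. The file name is hypothetical.
+
+```python
+import yaml
+
+with open("untrusted_composites.yaml") as f:
+    # safe_load raises yaml.YAMLError on python object tags
+    yaml.safe_load(f)
+```
+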
+## Reporting a Vulnerability
+
+Do you think you've found a security vulnerability or issue in this project? Let us know by sending
+an email to the maintainers at `pytroll-security@groups.io`. Please include as much information on
+the issue as possible like code examples, documentation on the issue in other packages, etc.
diff --git a/asv.conf.json b/asv.conf.json
index 07985af667..dbecadf79a 100644
--- a/asv.conf.json
+++ b/asv.conf.json
@@ -22,8 +22,7 @@
// Customizable commands for building, installing, and
// uninstalling the project. See asv.conf.json documentation.
//
- "install_command": ["in-dir={env_dir} python -mpip install {wheel_file} pyspectral pyorbital s3fs rasterio"],
- // "install_command": ["in-dir={env_dir} conda install {wheel_file} s3fs rasterio"],
+ //"install_command": ["in-dir={env_dir} python -mpip install {wheel_file} s3fs rasterio h5py netCDF4 pyhdf gcsfs shapely"],
// "uninstall_command": ["return-code=any python -mpip uninstall -y {project}"],
// "build_command": [
// "python setup.py build",
@@ -46,7 +45,8 @@
// If missing or the empty string, the tool will be automatically
// determined by looking for tools on the PATH environment
// variable.
- "environment_type": "virtualenv",
+ //"environment_type": "virtualenv",
+ "environment_type": "conda",
// timeout in seconds for installing any dependencies in environment
// defaults to 10 min
@@ -58,10 +58,11 @@
// The Pythons you'd like to test against. If not provided, defaults
// to the current version of Python used to run `asv`.
// "pythons": ["2.7", "3.6"],
+ "pythons": ["3.9", "3.10"],
// The list of conda channel names to be searched for benchmark
// dependency packages in the specified order
- // "conda_channels": ["conda-forge", "defaults"],
+ "conda_channels": ["conda-forge"],
// The matrix of dependencies to test. Each key is the name of a
// package (in PyPI) and the values are version numbers. An empty
@@ -78,6 +79,23 @@
// "six": ["", null], // test with and without six installed
// "pip+emcee": [""], // emcee is only available for install with pip.
// },
+ "matrix": {
+ "pyresample": ["1.22.3"],
+ "trollimage": ["1.17.0"],
+ "pyorbital": ["1.7.1"],
+ "pyspectral": ["0.10.6"],
+ "rasterio": ["1.2.10"],
+ "dask": ["2021.12.0"],
+ "xarray": ["0.20.2"],
+ "numpy": ["1.22.0"],
+ "s3fs": [],
+ "h5py": [],
+ "netCDF4": [],
+ "pyhdf": [],
+ "gcsfs": [],
+ "shapely": [],
+ "trollsift": []
+ },
// Combinations of libraries/python versions can be excluded/included
// from the set to test. Each entry is a dictionary containing additional
diff --git a/benchmarks/abi_l1b_benchmarks.py b/benchmarks/abi_l1b_benchmarks.py
new file mode 100644
index 0000000000..368d4f4036
--- /dev/null
+++ b/benchmarks/abi_l1b_benchmarks.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Benchmark ABI L1B operations."""
+from __future__ import annotations
+
+import os
+
+from pyspectral.rayleigh import check_and_download as download_luts
+from pyspectral.rsr_reader import check_and_download as download_rsr
+
+from benchmarks.utils import GeoBenchmarks, get_filenames
+
+
+class ABIL1B(GeoBenchmarks):
+ """Benchmark ABI L1B reading."""
+
+ timeout = 600
+ data_files: list[str] = []
+ subdir = os.path.join("abi_l1b", "20190314_us_midlatitude_cyclone")
+ reader = "abi_l1b"
+
+ def setup_cache(self):
+ """Fetch the data files."""
+ try:
+ from satpy.demo import get_us_midlatitude_cyclone_abi
+ get_us_midlatitude_cyclone_abi()
+ except ImportError:
+ if len(get_filenames(self.subdir)) != 16:
+ raise RuntimeError("Existing data files do not match the expected number of files.")
+ download_rsr()
+ download_luts(aerosol_type='rayleigh_only')
+
+ def setup(self):
+ """Set up the benchmarks."""
+ import satpy
+ self.data_files = get_filenames(self.subdir)
+ satpy.CHUNK_SIZE = 2048
+
+ def time_load_one_channel(self):
+ """Time the loading of one channel."""
+ self.compute_channel("C01")
+
+ def peakmem_load_one_channel(self):
+ """Check peak memory usage of loading one channel."""
+ self.compute_channel("C01")
+
+ def time_load_true_color(self):
+ """Time the loading of the generation of true_color."""
+ self.compute_composite("true_color")
+
+ def peakmem_load_true_color(self):
+ """Check peak memory usage of the generation of true_color."""
+ self.compute_composite("true_color")
+
+ def time_save_true_color_nocorr_to_geotiff(self):
+ """Time the generation and saving of true_color_nocorr."""
+ self.save_composite_as_geotiff("true_color_nocorr")
+
+ def peakmem_save_true_color_to_geotiff(self):
+ """Check peak memory usage of the generation and saving of true_color_nocorr."""
+ self.save_composite_as_geotiff("true_color_nocorr")
diff --git a/benchmarks/ahi_hsd_benchmarks.py b/benchmarks/ahi_hsd_benchmarks.py
index 71544eebe8..fbd2fe7162 100644
--- a/benchmarks/ahi_hsd_benchmarks.py
+++ b/benchmarks/ahi_hsd_benchmarks.py
@@ -15,83 +15,62 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Benchmark satpy."""
+"""Benchmark AHI HSD operations.."""
-from pyspectral.utils import download_rsr, download_luts
+from __future__ import annotations
+import os
-class HimawariHSD:
+from pyspectral.rayleigh import check_and_download as download_luts
+from pyspectral.rsr_reader import check_and_download as download_rsr
+
+from benchmarks.utils import GeoBenchmarks, get_filenames
+
+
+class HimawariHSD(GeoBenchmarks):
"""Benchmark Himawari HSD reading."""
timeout = 600
- data_files = []
+ data_files: list[str] = []
+ subdir = os.path.join("ahi_hsd", "20210417_0500_typhoon_surigae")
+ reader = 'ahi_hsd'
def setup_cache(self):
"""Fetch the data files."""
- from satpy.demo import download_typhoon_surigae_ahi
- download_typhoon_surigae_ahi(channels=[1, 2, 3, 4], segments=[4])
+ try:
+ from satpy.demo import download_typhoon_surigae_ahi
+ download_typhoon_surigae_ahi(channels=[1, 2, 3, 4], segments=[4])
+ except ImportError:
+ assert len(get_filenames(self.subdir)) == 4
download_rsr()
download_luts(aerosol_type='rayleigh_only')
def setup(self):
"""Set up the benchmarks."""
import satpy
- from satpy.demo import download_typhoon_surigae_ahi
- # This just returns the filenames, as the data already is downloaded above
- self.data_files = download_typhoon_surigae_ahi(channels=[1, 2, 3, 4], segments=[4])
+ self.data_files = get_filenames(self.subdir)
satpy.CHUNK_SIZE = 2048
def time_load_one_channel(self):
"""Time the loading of one channel."""
- self.compute_B01()
+ self.compute_channel("B01")
def peakmem_load_one_channel(self):
"""Check peak memory usage of loading one channel."""
- self.compute_B01()
+ self.compute_channel("B01")
def time_load_true_color(self):
"""Time the loading of the generation of true_color."""
- self.compute_true_color()
+ self.compute_composite("true_color")
def peakmem_load_true_color(self):
"""Check peak memory usage of the generation of true_color."""
- self.compute_true_color()
+ self.compute_composite("true_color")
def time_save_true_color_nocorr_to_geotiff(self):
"""Time the generation and saving of true_color_nocorr."""
- self.save_true_color_nocorr_as_geotiff()
+ self.save_composite_as_geotiff("true_color_nocorr")
def peakmem_save_true_color_to_geotiff(self):
"""Check peak memory usage of the generation and saving of true_color_nocorr."""
- self.save_true_color_nocorr_as_geotiff()
-
- def load(self, composite):
- """Load one composite."""
- from satpy import Scene
- scn = Scene(filenames=self.data_files, reader='ahi_hsd')
- scn.load([composite], pad_data=False)
- return scn
-
- def load_and_native_resample(self, composite):
- """Load and native resample a composite."""
- scn = self.load(composite)
- lscn = scn.resample(resampler='native')
- return lscn
-
- def compute_B01(self):
- """Load and compute one channel."""
- composite = "B01"
- scn = self.load(composite)
- scn[composite].compute()
-
- def compute_true_color(self):
- """Compute a true color image."""
- composite = "true_color"
- lscn = self.load_and_native_resample(composite)
- lscn[composite].compute()
-
- def save_true_color_nocorr_as_geotiff(self):
- """Save a true_color_nocorr to disk as geotiff."""
- composite = "true_color_nocorr"
- lscn = self.load_and_native_resample(composite)
- lscn.save_dataset(composite, filename='test.tif', tiled=True)
+ self.save_composite_as_geotiff("true_color_nocorr")
diff --git a/benchmarks/fci_benchmarks.py b/benchmarks/fci_benchmarks.py
new file mode 100644
index 0000000000..2df5430db8
--- /dev/null
+++ b/benchmarks/fci_benchmarks.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Benchmark FCI.
+
+Benchmarks for reading and processing data from the Meteosat Third Generation
+(MTG) Flexible Combined Imager (FCI). Uses pre-launch simulated test data as
+published by EUMETSAT in 2020.
+
+Also includes some benchmarks trying different resamplers.
+"""
+
+from __future__ import annotations
+
+import fnmatch
+import os
+
+import satpy
+import satpy.demo.fci
+
+from .utils import GeoBenchmarks
+
+
+class FCI(GeoBenchmarks):
+ """Benchmark FCI FDHSI test data reading."""
+
+ timeout = 600
+ region = "eurol"
+ reader = "fci_l1c_nc"
+ filenames: list[str] = []
+
+ def setup_cache(self, *args):
+ """Fetch the data files."""
+ fns = self.get_filenames()
+ cnt = len(fns)
+ if cnt > 40:
+            raise ValueError(f"Expected 40 files, found {cnt:d}")
+ if cnt < 40:
+ fns = satpy.demo.download_fci_test_data()
+
+ def setup(self, *args):
+ """Set location of data files."""
+ self.filenames = self.get_filenames()
+
+ def get_filenames(self):
+ """Get filenames of FCI test data as already available."""
+ p = satpy.demo.fci.get_fci_test_data_dir()
+ g = p.glob("UNCOMPRESSED/NOMINAL/*-CHK-BODY-*.nc")
+ return [os.fspath(fn) for fn in g]
+
+ def time_create_scene(self, chunk):
+ """Time to create a scene."""
+ names = self._get_filename_selection(chunk)
+ self.create_scene(names)
+ time_create_scene.params = ["some", "all"] # type: ignore
+ time_create_scene.param_names = ["channel subset"] # type: ignore
+
+ def peakmem_create_scene(self, chunk):
+ """Peak RAM to create a scene."""
+ names = self._get_filename_selection(chunk)
+ self.create_scene(names)
+ peakmem_create_scene.params = time_create_scene.params # type: ignore
+ peakmem_create_scene.param_names = time_create_scene.param_names # type: ignore
+
+ def time_load(self, chunk, loadable):
+ """Time to create a scene and load one channel or composite."""
+ names = self._get_filename_selection(chunk)
+ self.load_no_padding(loadable, names)
+ time_load.params = (time_create_scene.params, # type: ignore
+ ["ir_105", "natural_color_raw"])
+ time_load.param_names = time_create_scene.param_names + ["dataset"] # type: ignore
+
+ def peakmem_load(self, chunk, loadable):
+ """Peak RAM to create a scene and load one channel or composite."""
+ names = self._get_filename_selection(chunk)
+ self.load_no_padding(loadable, names)
+ peakmem_load.params = time_load.params # type: ignore
+ peakmem_load.param_names = time_load.param_names # type: ignore
+
+ def time_compute(self, chunk, loadable):
+ """Time to create a scene and load and compute one channel."""
+ names = self._get_filename_selection(chunk)
+ self.compute_channel(loadable, names)
+ time_compute.params = time_load.params # type: ignore
+ time_compute.param_names = time_load.param_names # type: ignore
+
+ def peakmem_compute(self, chunk, loadable):
+ """Peak memory for creating a scene and loading and computing one channel."""
+ names = self._get_filename_selection(chunk)
+ self.compute_channel(loadable, names)
+ peakmem_compute.params = time_compute.params # type: ignore
+ peakmem_compute.param_names = time_compute.param_names # type: ignore
+
+ def time_load_resample_compute(self, chunk, loadable, mode):
+ """Time to load all chunks, resample, and compute."""
+ names = self._get_filename_selection(chunk)
+ self.compute_composite(loadable, mode, self.region, names)
+ time_load_resample_compute.params = time_load.params + ( # type: ignore
+ ["nearest", "bilinear", "gradient_search"],)
+ time_load_resample_compute.param_names = time_load.param_names + ["resampler"] # type: ignore
+
+ def peakmem_load_resample_compute(self, chunk, loadable, mode):
+ """Peak memory to load all chunks, resample, and compute."""
+ names = self._get_filename_selection(chunk)
+ self.compute_composite(loadable, mode, self.region, names)
+ peakmem_load_resample_compute.params = time_load_resample_compute.params # type: ignore
+ peakmem_load_resample_compute.param_names = time_load_resample_compute.param_names # type: ignore
+
+ def time_load_resample_save(self, chunk, loadable, mode):
+ """Time to load all chunks, resample, and save."""
+ names = self._get_filename_selection(chunk)
+ self.save_composite_as_geotiff(loadable, mode, self.region, names)
+ time_load_resample_save.params = time_load_resample_compute.params # type: ignore
+ time_load_resample_save.param_names = time_load_resample_compute.param_names # type: ignore
+
+ def peakmem_load_resample_save(self, chunk, loadable, mode):
+ """Peak memory to load all chunks, resample, and save."""
+ names = self._get_filename_selection(chunk)
+ self.save_composite_as_geotiff(loadable, mode, self.region, names)
+ peakmem_load_resample_save.params = time_load_resample_save.params # type: ignore
+ peakmem_load_resample_save.param_names = time_load_resample_save.param_names # type: ignore
+
+ def _get_filename_selection(self, selection):
+ if selection == "some":
+ return fnmatch.filter(self.filenames, "*3[0123].nc")
+ if selection == "all":
+ return self.filenames
+ raise ValueError("Expected selection some or all, got " +
+ selection)
diff --git a/benchmarks/seviri_hrit_benchmarks.py b/benchmarks/seviri_hrit_benchmarks.py
new file mode 100644
index 0000000000..efbe210462
--- /dev/null
+++ b/benchmarks/seviri_hrit_benchmarks.py
@@ -0,0 +1,76 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Benchmark SEVIRI HRIT operations."""
+
+from __future__ import annotations
+
+import os
+
+from pyspectral.rayleigh import check_and_download as download_luts
+from pyspectral.rsr_reader import check_and_download as download_rsr
+
+from benchmarks.utils import GeoBenchmarks, get_filenames
+
+
+class SEVIRIHRIT(GeoBenchmarks):
+ """Benchmark SEVIRI HRIT reading."""
+
+ timeout = 600
+ data_files: list[str] = []
+ subdir = os.path.join("seviri_hrit", "20180228_1500")
+ reader = "seviri_l1b_hrit"
+
+ def setup_cache(self):
+ """Fetch the data files."""
+ try:
+ from satpy.demo import download_seviri_hrit_20180228_1500
+ download_seviri_hrit_20180228_1500()
+ except ImportError:
+ assert len(get_filenames(self.subdir)) == 114
+ download_rsr()
+ download_luts(aerosol_type='rayleigh_only')
+
+ def setup(self):
+ """Set up the benchmarks."""
+ import satpy
+ self.data_files = get_filenames(self.subdir)
+ satpy.CHUNK_SIZE = 2048
+
+ def time_load_one_channel(self):
+ """Time the loading of one channel."""
+ self.compute_channel("VIS006")
+
+ def peakmem_load_one_channel(self):
+ """Check peak memory usage of loading one channel."""
+ self.compute_channel("VIS006")
+
+ def time_load_overview(self):
+ """Time the loading of the generation of overview."""
+ self.compute_composite("overview")
+
+ def peakmem_load_overview(self):
+ """Check peak memory usage of the generation of overview."""
+ self.compute_composite("overview")
+
+ def time_save_overview_to_geotiff(self):
+ """Time the generation and saving of overview."""
+ self.save_composite_as_geotiff("overview")
+
+ def peakmem_save_overview_to_geotiff(self):
+ """Check peak memory usage of the generation and saving of overview."""
+ self.save_composite_as_geotiff("overview")
diff --git a/benchmarks/utils.py b/benchmarks/utils.py
new file mode 100644
index 0000000000..54338d4eac
--- /dev/null
+++ b/benchmarks/utils.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Benchmark utilities."""
+
+import os
+
+
+def get_filenames(subdir):
+ """Get the data filenames manually."""
+ import glob
+ base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".")
+ return glob.glob(os.path.join(base_dir, subdir, "*"))
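+
+
+# Example (assumed invocation, not part of this module): with the demo data
+# already downloaded, point the benchmarks at it before running asv:
+#
+#     SATPY_DEMO_DATA_DIR=/path/to/demo_data asv run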
+
+
+class GeoBenchmarks:
+    """Shared load/compute/save helpers for geostationary reader benchmarks."""
+
+ def create_scene(self, filenames=None):
+ """Create a scene."""
+ from satpy import Scene
+ scn = Scene(filenames=filenames or self.data_files, reader=self.reader)
+ return scn
+
+ def load_no_padding(self, composite, filenames=None):
+ """Load one composite or channel."""
+ scn = self.create_scene(filenames=filenames)
+ scn.load([composite], pad_data=False)
+ return scn
+
+ def load_and_native_resample(self, composite):
+ """Load and native resample a composite or channel."""
+ return self.load_and_resample(composite, "native")
+
+ def load_and_resample(self, composite, resampler, area=None, filenames=None):
+ """Load and resample a composite or channel with resampler and area."""
+ scn = self.load_no_padding(composite, filenames=filenames)
+ ls = scn.resample(area, resampler=resampler)
+ ls._readers = scn._readers # workaround for GH#1861
+ return ls
+
+ def compute_composite(self, composite, resampler="native",
+ area=None, filenames=None):
+        """Load, resample, and compute a composite or channel."""
+ lscn = self.load_and_resample(
+ composite, resampler, area, filenames)
+ lscn[composite].compute()
+
+ def save_composite_as_geotiff(self, composite, resampler="native",
+ area=None, filenames=None):
+ """Save a composite to disk as geotiff."""
+ lscn = self.load_and_resample(composite, resampler, area, filenames)
+ lscn.save_dataset(composite, filename='test.tif', tiled=True)
+
+ def compute_channel(self, channel, filenames=None):
+ """Load and compute one channel."""
+ scn = self.load_no_padding(channel, filenames=filenames)
+ scn[channel].compute()
diff --git a/benchmarks/viirs_sdr_benchmarks.py b/benchmarks/viirs_sdr_benchmarks.py
new file mode 100644
index 0000000000..2f8849b671
--- /dev/null
+++ b/benchmarks/viirs_sdr_benchmarks.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Benchmark VIIRS SDR operations.."""
+
+from __future__ import annotations
+
+import glob
+import os
+
+from pyspectral.rayleigh import check_and_download as download_luts
+from pyspectral.rsr_reader import check_and_download as download_rsr
+
+
+class VIIRSSDRBenchmarkBase:
+ """Shared methods for working with VIIRS SDR data."""
+
+ timeout = 600
+ data_files: list[str] = []
+
+ def setup_cache(self):
+ """Fetch the data files."""
+ try:
+ from satpy.demo import get_viirs_sdr_20170128_1229
+ get_viirs_sdr_20170128_1229(
+ channels=("I01", "M03", "M04", "M05"),
+ granules=(2, 3, 4))
+ except ImportError:
+ assert len(self.get_filenames()) == 6 * 3
+ download_rsr()
+ download_luts(aerosol_type='rayleigh_only')
+
+ def setup(self, name):
+ """Set up the benchmarks."""
+ import satpy
+ self.data_files = self.get_filenames()
+ satpy.CHUNK_SIZE = 2048
+
+ def get_filenames(self):
+ """Get the data filenames manually."""
+ base_dir = os.environ.get("SATPY_DEMO_DATA_DIR", ".")
+ return glob.glob(os.path.join(base_dir, "viirs_sdr", "20170128_1229", "*.h5"))
+
+ def load(self, composite):
+ """Load one composite."""
+ from satpy import Scene
+ scn = Scene(filenames=self.data_files, reader='viirs_sdr')
+ scn.load([composite])
+ return scn
+
+ def load_and_native_resample(self, composite):
+ """Load and native resample a composite."""
+ scn = self.load(composite)
+ lscn = scn.resample(resampler='native')
+ return lscn
+
+
+class VIIRSSDRReaderBenchmarks(VIIRSSDRBenchmarkBase):
+ """Benchmark reading and writing VIIRS SDR data."""
+
+ params = ["I01", "M03"]
+ param_names = ["name"]
+
+ def time_load_one_channel(self, name):
+ """Time the loading of one channel."""
+ self.compute_product(name)
+
+ def peakmem_load_one_channel(self, name):
+ """Check peak memory usage of loading one channel."""
+ self.compute_product(name)
+
+ def compute_product(self, name):
+ """Load and compute one channel."""
+ scn = self.load(name)
+ scn[name].compute()
+
+
+class VIIRSSDRCompositeBenchmarks(VIIRSSDRBenchmarkBase):
+ """Benchmark generating and writing composites from VIIRS SDR data."""
+
+ params = ["true_color", "true_color_crefl", "true_color_raw"]
+ param_names = ["name"]
+
+ def time_load_composite(self, name):
+ """Time the loading of the generation of a composite."""
+ self.compute_composite(name)
+
+ def peakmem_load_composite(self, name):
+ """Check peak memory usage of the generation of a composite."""
+ self.compute_composite(name)
+
+ def time_save_composite_to_geotiff(self, name):
+ """Time the generation and saving of a composite."""
+ self.save_composite_as_geotiff(name)
+
+ def peakmem_save_composite_raw_to_geotiff(self, name):
+ """Check peak memory usage of the generation and saving of a composite."""
+ self.save_composite_as_geotiff(name)
+
+ def compute_composite(self, name):
+ """Compute a composite."""
+ lscn = self.load_and_native_resample(name)
+ lscn[name].compute()
+
+ def save_composite_as_geotiff(self, name):
+ """Save a composite to disk as geotiff."""
+ lscn = self.load_and_native_resample(name)
+ lscn.save_dataset(name, filename='test.tif', tiled=True)
diff --git a/changelog_pre0.9.0.rst b/changelog_pre0.9.0.rst
index e4bbc5c021..90f9c65995 100644
--- a/changelog_pre0.9.0.rst
+++ b/changelog_pre0.9.0.rst
@@ -4374,6 +4374,3 @@ Other
- Modified image inversion unit test to reflect new behaviour. [Martin
Raspaud]
- New rebase. [Martin Raspaud]
-
-
-
diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml
index 8b652fcb3c..f8a94233bc 100644
--- a/continuous_integration/environment.yaml
+++ b/continuous_integration/environment.yaml
@@ -2,7 +2,7 @@ name: test-environment
channels:
- conda-forge
dependencies:
- - xarray
+ - xarray!=2022.9.0
- dask
- distributed
- donfig
@@ -11,12 +11,13 @@ dependencies:
- Cython
- sphinx
- cartopy
+ - panel>=0.12.7
- pillow
- matplotlib
- scipy
- pyyaml
- pyproj
- - pyresample
+ - pyresample>=1.24
- coveralls
- coverage
- codecov
@@ -26,7 +27,9 @@ dependencies:
- h5netcdf
- gdal
- rasterio
+ - bottleneck
- rioxarray
+ - defusedxml
- imageio
- pyhdf
- mock
@@ -39,11 +42,15 @@ dependencies:
- geoviews
- pytest
- pytest-cov
+ - pytest-lazy-fixture
- fsspec
+ - s3fs
- pylibtiff
- python-geotiepoints
- pooch
- pip
+ - skyfield
+ - astropy
- pip:
- trollsift
- trollimage
diff --git a/doc/Makefile b/doc/Makefile
index 85bcb88ab0..624fe21234 100644
--- a/doc/Makefile
+++ b/doc/Makefile
@@ -2,7 +2,7 @@
#
# You can set these variables from the command line.
-SPHINXOPTS =
+SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = build
@@ -29,6 +29,7 @@ help:
clean:
-rm -rf $(BUILDDIR)/*
+ -rm -rf source/api/*.rst
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml
index a95a5e497c..041bf79ca8 100644
--- a/doc/rtd_environment.yml
+++ b/doc/rtd_environment.yml
@@ -2,16 +2,25 @@ name: readthedocs
channels:
- conda-forge
dependencies:
- - python=3.7
+ - python=3.10
- pip
+ - appdirs
- dask
+ - defusedxml
- donfig
- - appdirs
+ # 2.19.1 seems to cause library linking issues
+ - eccodes>=2.20
- graphviz
- numpy
- pillow
- pooch
- pyresample
+ - pytest
+ - pytest-lazy-fixture
+ - python-eccodes
+ - python-geotiepoints
+ - rasterio
+ - rioxarray
- setuptools
- setuptools_scm
- setuptools_scm_git_archive
diff --git a/doc/source/_static/main.js b/doc/source/_static/main.js
new file mode 100644
index 0000000000..188a335e71
--- /dev/null
+++ b/doc/source/_static/main.js
@@ -0,0 +1,6 @@
+$(document).ready( function () {
+    $('table.datatable').DataTable( {
+        "paging": false,
+        "dom": 'lfitp'
+    } );
+} );
diff --git a/doc/source/_static/theme_overrides.css b/doc/source/_static/theme_overrides.css
index 174fade5f2..63ee6cc74c 100644
--- a/doc/source/_static/theme_overrides.css
+++ b/doc/source/_static/theme_overrides.css
@@ -10,4 +10,4 @@
.wy-table-responsive {
overflow: visible !important;
}
-}
\ No newline at end of file
+}
diff --git a/doc/source/composites.rst b/doc/source/composites.rst
index ac1ca9471f..aff0438af0 100644
--- a/doc/source/composites.rst
+++ b/doc/source/composites.rst
@@ -9,7 +9,6 @@ Composites are generated in satpy using Compositor classes. The attributes of th
resulting composites are usually a combination of the prerequisites' attributes and
the key/values of the DataID used to identify it.
-
Built-in Compositors
====================
@@ -108,15 +107,36 @@ first composite will be placed on the day-side of the scene, and the
second one on the night side. The transition from day to night is
done by calculating solar zenith angle (SZA) weighed average of the
two composites. The SZA can optionally be given as third dataset, and
-if not given, the angles will be calculated. Width of the blending
-zone can be defined when initializing the compositor (default values
-shown in the example below).
+if not given, the angles will be calculated. Three arguments are used
+to generate the image (default values shown in the example below).
+They can be defined when initializing the compositor::
+
+ - lim_low (float): lower limit of Sun zenith angle for the
+ blending of the given channels
+ - lim_high (float): upper limit of Sun zenith angle for the
+ blending of the given channels
+ Together with `lim_low` they define the width
+ of the blending zone
+ - day_night (string): "day_night" means both day and night portions will be kept
+ "day_only" means only day portion will be kept
+ "night_only" means only night portion will be kept
+
+Usage (with default values)::
>>> from satpy.composites import DayNightCompositor
- >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88.)
+ >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_night")
>>> composite = compositor([local_scene['true_color'],
... local_scene['night_fog']])
+As above, but with the `day_night` flag it is also possible to use
+only the day or only the night product and mask out (make transparent)
+the opposite portion of the image. The example below produces a
+day-only product with the night portion masked out::
+
+ >>> from satpy.composites import DayNightCompositor
+ >>> compositor = DayNightCompositor("dnc", lim_low=85., lim_high=88., day_night="day_only")
+    >>> composite = compositor([local_scene['true_color']])
+
RealisticColors
---------------
@@ -224,6 +244,28 @@ BackgroundCompositor
>>> background = local_scene['overview']
>>> composite = compositor([clouds, background])
+CategoricalDataCompositor
+-------------------------
+
+:class:`CategoricalDataCompositor` can be used to recategorize categorical data. This is for example useful to
+combine comparable categories into a common category. The category remapping from `data` to `composite` is done
+using a look-up-table (`lut`)::
+
+    composite = [[lut[data[0,0]],  lut[data[0,1]],  ..., lut[data[0,Nj]]],
+                 [lut[data[1,0]],  lut[data[1,1]],  ..., lut[data[1,Nj]]],
+                 ...,
+                 [lut[data[Ni,0]], lut[data[Ni,1]], ..., lut[data[Ni,Nj]]]]
+
+Hence, `lut` must have a length that is greater than the maximum value in `data` in order to avoid an `IndexError`.
+Below is an example of how to create a binary clear-sky/cloud mask from a pseudo cloud type product with six
+categories representing clear sky (cat1/cat5), cloudy features (cat2-cat4) and missing/undefined data (cat0)::
+
+ >>> cloud_type = local_scene['cloud_type'] # 0 - cat0, 1 - cat1, 2 - cat2, 3 - cat3, 4 - cat4, 5 - cat5,
+ # categories: 0 1 2 3 4 5
+ >>> lut = [np.nan, 0, 1, 1, 1, 0]
+ >>> compositor = CategoricalDataCompositor('binary_cloud_mask', lut=lut)
+ >>> composite = compositor([cloud_type]) # 0 - cat1/cat5, 1 - cat2/cat3/cat4, nan - cat0
+
+
Creating composite configuration files
======================================
@@ -282,9 +324,9 @@ to be added.
Using modifiers
---------------
-In many cases the basic datasets need to be adjusted, e.g. for Solar
-zenith angle normalization. These modifiers can be applied in the
-following way::
+In many cases the basic datasets that go into the composite need to be
+adjusted, e.g. for Solar zenith angle normalization. These modifiers
+can be applied in the following way::
overview:
compositor: !!python/name:satpy.composites.GenericCompositor
@@ -305,6 +347,12 @@ Here we see two changes:
The modifier above is a built-in that normalizes the Solar zenith
angle to Sun being directly at the zenith.
+More examples can be found in the Satpy source code directory
+`satpy/etc/composites <https://github.com/pytroll/satpy/tree/main/satpy/etc/composites>`_.
+
+See the :doc:`modifiers` documentation for more information on
+available built-in modifiers.
+
Using other composites
----------------------
@@ -319,7 +367,7 @@ the day side, and another for the night side::
- night_fog
standard_name: natural_with_night_fog
-This compositor has two additional keyword arguments that can be
+This compositor has three additional keyword arguments that can be
defined (shown with the default values, thus identical result as
above)::
@@ -329,7 +377,8 @@ above)::
- natural_color
- night_fog
lim_low: 85.0
- lim_high: 95.0
+ lim_high: 88.0
+ day_night: "day_night"
standard_name: natural_with_night_fog
Defining other composites in-line
@@ -487,8 +536,27 @@ the file) as::
kwargs:
gamma: [1.7, 1.7, 1.7]
+.. warning::
+ If you define a composite with no matching enhancement, Satpy will by
+ default apply the :func:`~trollimage.xrimage.XRImage.stretch_linear` enhancement with
+ cutoffs of 0.5% and 99.5%. If you want no enhancement at all (maybe you
+ are enhancing a composite based on :class:`DayNightCompositor` where
+ the components have their own enhancements defined), you need to define
+ an enhancement that does nothing::
+
+ enhancements:
+ day_x:
+ standard_name: day_x
+ operations: []
+
+ It is recommended to define an enhancement even if you intend to use
+ the default, in case the default should change in future versions of
+ Satpy.
+
More examples can be found in the Satpy source code directory
``satpy/etc/enhancements/generic.yaml``.
See the :doc:`enhancements` documentation for more information on
available built-in enhancements.
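+
+To experiment with an enhancement before committing it to YAML, you can
+apply the configured enhancement by hand with
+:func:`~satpy.writers.get_enhanced_image`. A sketch, assuming a ``Scene``
+with the hypothetical ``day_x`` composite from the warning above loaded::
+
+    >>> from satpy.writers import get_enhanced_image
+    >>> img = get_enhanced_image(local_scene['day_x'])
+    >>> img.data  # the enhanced values as an xarray DataArray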
+
+.. include:: modifiers.rst
diff --git a/doc/source/conf.py b/doc/source/conf.py
index 8a2c5d787d..477cefd279 100644
--- a/doc/source/conf.py
+++ b/doc/source/conf.py
@@ -12,9 +12,12 @@
# serve to show the default.
"""Sphinx documentation configuration and setup."""
+from __future__ import annotations
+
import os
import sys
from datetime import datetime
+
from pkg_resources import get_distribution
# If extensions (or modules to document with autodoc) are in another directory,
@@ -23,6 +26,7 @@
sys.path.append(os.path.abspath('../../'))
sys.path.append(os.path.abspath(os.path.dirname(__file__)))
+from reader_table import generate_reader_table # noqa: E402
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -64,7 +68,7 @@ def __getattr__(cls, name):
# https://github.com/sphinx-doc/sphinx/issues/3920
MOCK_MODULES = ['h5py']
for mod_name in MOCK_MODULES:
- sys.modules[mod_name] = Mock()
+ sys.modules[mod_name] = Mock() # type: ignore
autodoc_mock_imports = ['cf', 'glymur', 'h5netcdf', 'imageio', 'mipp', 'netCDF4',
'pygac', 'pygrib', 'pyhdf', 'pyninjotiff',
@@ -72,13 +76,18 @@ def __getattr__(cls, name):
'zarr']
autoclass_content = 'both' # append class __init__ docstring to the class docstring
+# auto generate reader table from reader config files
+with open("reader_table.rst", mode="w") as f:
+ f.write(generate_reader_table())
+
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.coverage',
'sphinx.ext.doctest', 'sphinx.ext.napoleon', 'sphinx.ext.autosummary', 'doi_role',
- 'sphinx.ext.viewcode', 'sphinxcontrib.apidoc']
+ 'sphinx.ext.viewcode', 'sphinxcontrib.apidoc',
+ 'sphinx.ext.mathjax']
# API docs
apidoc_module_dir = "../../satpy"
@@ -122,7 +131,7 @@ def __getattr__(cls, name):
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
-exclude_trees = []
+exclude_trees: list[str] = []
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
@@ -182,8 +191,15 @@ def __getattr__(cls, name):
html_css_files = [
'theme_overrides.css', # override wide tables in RTD theme
+ 'https://cdn.datatables.net/1.10.23/css/jquery.dataTables.min.css',
]
+html_js_files = [
+ 'https://cdn.datatables.net/1.10.23/js/jquery.dataTables.min.js',
+ 'main.js',
+]
+
+
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
@@ -234,8 +250,8 @@ def __getattr__(cls, name):
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
- ('index', 'satpy.tex', u'satpy documentation',
- u'SMHI', 'manual'),
+ ('index', 'satpy.tex', 'Satpy Documentation',
+ 'Satpy Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
@@ -261,18 +277,19 @@ def __getattr__(cls, name):
'dask': ('https://docs.dask.org/en/latest', None),
'geoviews': ('http://geoviews.org', None),
'jobqueue': ('https://jobqueue.dask.org/en/latest', None),
- 'numpy': ('https://docs.scipy.org/doc/numpy', None),
+ 'numpy': ('https://numpy.org/doc/stable', None),
'pydecorate': ('https://pydecorate.readthedocs.io/en/stable', None),
'pyorbital': ('https://pyorbital.readthedocs.io/en/stable', None),
'pyproj': ('https://pyproj4.github.io/pyproj/dev', None),
'pyresample': ('https://pyresample.readthedocs.io/en/stable', None),
'pytest': ('https://docs.pytest.org/en/stable/', None),
'python': ('https://docs.python.org/3', None),
- 'scipy': ('https://docs.scipy.org/doc/scipy/reference', None),
+ 'scipy': ('http://scipy.github.io/devdocs', None),
'trollimage': ('https://trollimage.readthedocs.io/en/stable', None),
'trollsift': ('https://trollsift.readthedocs.io/en/stable', None),
'xarray': ('https://xarray.pydata.org/en/stable', None),
'rasterio': ('https://rasterio.readthedocs.io/en/latest', None),
'donfig': ('https://donfig.readthedocs.io/en/latest', None),
'pooch': ('https://www.fatiando.org/pooch/latest/', None),
+ 'fsspec': ('https://filesystem-spec.readthedocs.io/en/latest/', None),
}
diff --git a/doc/source/config.rst b/doc/source/config.rst
index b578055ace..7378ceb910 100644
--- a/doc/source/config.rst
+++ b/doc/source/config.rst
@@ -93,6 +93,68 @@ defaults to a different path depending on your operating system following
the `appdirs <https://github.com/ActiveState/appdirs>`_
"user cache dir".
+.. _config_cache_lonlats_setting:
+
+Cache Longitudes and Latitudes
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* **Environment variable**: ``SATPY_CACHE_LONLATS``
+* **YAML/Config Key**: ``cache_lonlats``
+* **Default**: ``False``
+
+Whether or not generated longitude and latitude coordinates should be cached
+to on-disk zarr arrays. Currently this only works in very specific cases,
+mainly the lon/lats that are generated when computing sensor and solar zenith
+and azimuth angles used in various modifiers and compositors. This caching is
+only done for ``AreaDefinition``-based geolocation, not ``SwathDefinition``.
+Arrays are stored in ``cache_dir`` (see above).
+
+When setting this as an environment variable, this should be set with the
+string equivalent of the Python boolean values ``="True"`` or ``="False"``.
+
+See also ``cache_sensor_angles`` below.
+
+.. warning::
+
+ This caching does not limit the number of entries nor does it expire old
+ entries. It is up to the user to manage the contents of the cache
+ directory.
+
+.. _config_cache_sensor_angles_setting:
+
+Cache Sensor Angles
+^^^^^^^^^^^^^^^^^^^
+
+* **Environment variable**: ``SATPY_CACHE_SENSOR_ANGLES``
+* **YAML/Config Key**: ``cache_sensor_angles``
+* **Default**: ``False``
+
+Whether or not generated sensor azimuth and sensor zenith angles should be
+cached to on-disk zarr arrays. These angles are primarily used in certain
+modifiers and compositors. This caching is only done for
+``AreaDefinition``-based geolocation, not ``SwathDefinition``.
+Arrays are stored in ``cache_dir`` (see above).
+
+This caching requires producing an estimate of the angles to avoid needing to
+generate new angles for every new data case. This happens because the angle
+generation depends on the observation time of the data and the position of the
+satellite (longitude, latitude, altitude). The angles are estimated by using
+a constant observation time for all cases (maximum ~1e-10 error) and by rounding
+satellite position coordinates to the nearest tenth of a degree for longitude
+and latitude and nearest tenth meter (maximum ~0.058 error). Note these
+estimations are only done if caching is enabled (this parameter is True).
+
+When setting this as an environment variable, this should be set with the
+string equivalent of the Python boolean values ``="True"`` or ``="False"``.
+
+See also ``cache_lonlats`` above.
+
+.. warning::
+
+ This caching does not limit the number of entries nor does it expire old
+ entries. It is up to the user to manage the contents of the cache
+ directory.
+
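+Both caching options can also be enabled at runtime from Python; the cache
+directory below is only an example path:
+
+.. code-block:: python
+
+    import satpy
+    satpy.config.set(cache_lonlats=True, cache_sensor_angles=True,
+                     cache_dir="/tmp/satpy_cache")
+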
.. _config_path_setting:
Component Configuration Path
@@ -119,7 +181,8 @@ Note that this value must be a list. In Python, this could be set by doing:
satpy.config.set(config_path=['/path/custom1', '/path/custom2'])
If setting an environment variable then it must be a
-colon-separated string and must be set **before** calling/importing Satpy.
+colon-separated (``:``) string on Linux/OSX or a semicolon-separated (``;``)
+string on Windows, and must be set **before** calling/importing Satpy.
If the environment variable is a single path it will be converted to a list
when Satpy is imported.
@@ -127,6 +190,12 @@ when Satpy is imported.
export SATPY_CONFIG_PATH="/path/custom1:/path/custom2"
+On Windows, with paths on the `C:` drive, these paths would be:
+
+.. code-block:: bash
+
+ set SATPY_CONFIG_PATH="C:/path/custom1;C:/path/custom2"
+
Satpy will always include the builtin configuration files that it
is distributed with regardless of this setting. When a component supports
merging of configuration files, they are merged in reverse order. This means
@@ -150,6 +219,16 @@ defaults to a different path depending on your operating system following the
.. _download_aux_setting:
+Demo Data Directory
+^^^^^^^^^^^^^^^^^^^
+
+* **Environment variable**: ``SATPY_DEMO_DATA_DIR``
+* **YAML/Config Key**: ``demo_data_dir``
+* **Default**:
+
+Directory where demo data functions will download data files to. Available
+demo data functions can be found in the :mod:`satpy.demo` subpackage.
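+
+For example, to download one of the available demo datasets into a custom
+directory (the directory and the chosen demo function are just examples):
+
+.. code-block:: python
+
+    import satpy
+    from satpy.demo import get_us_midlatitude_cyclone_abi
+
+    with satpy.config.set(demo_data_dir="/tmp/satpy_demo"):
+        filenames = get_us_midlatitude_cyclone_abi()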
+
Download Auxiliary Data
^^^^^^^^^^^^^^^^^^^^^^^
@@ -163,6 +242,22 @@ will download and cache any necessary data files to :ref:`data_dir_setting`
when needed. If ``False`` then pre-downloaded files will be used, but any
other files will not be downloaded or checked for validity.
+Sensor Angles Position Preference
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+* **Environment variable**: ``SATPY_SENSOR_ANGLES_POSITION_PREFERENCE``
+* **YAML/Config Key**: ``sensor_angles_position_preference``
+* **Default**: "actual"
+
+Control which satellite position should be preferred when generating sensor
+azimuth and sensor zenith angles. This value is passed directly to the
+:func:`~satpy.utils.get_satpos` function. See the documentation for that
+function for more information on how the value will be used. This is used
+as part of the :func:`~satpy.modifiers.angles.get_angles` and
+:func:`~satpy.modifiers.angles.get_satellite_zenith_angle` functions, which
+are used by multiple modifiers and composites, including the default
+rayleigh correction.
+
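+As a sketch, to prefer the nominal satellite position over the actual one
+(``"nominal"`` being one of the values accepted by
+:func:`~satpy.utils.get_satpos`):
+
+.. code-block:: python
+
+    import satpy
+    satpy.config.set(sensor_angles_position_preference="nominal")
+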
.. _component_configuration:
Component Configuration
diff --git a/doc/source/data_download.rst b/doc/source/data_download.rst
index fdf56fd574..b8742fac96 100644
--- a/doc/source/data_download.rst
+++ b/doc/source/data_download.rst
@@ -36,7 +36,9 @@ NOAA GOES on Amazon Web Services
* `Data Browser `__
* Associated Readers: ``abi_l1b``
-In addition ot the pages above, Brian Blaylock has prepared some instructions
+In addition to the pages above, Brian Blaylock's `GOES-2-Go <https://github.com/blaylockbk/goes2go>`_
+python package is useful for downloading GOES data to your local machine.
+Brian also prepared some instructions
for using the ``rclone`` tool for downloading AWS data to a local machine. The
instructions can be found
`here `_.
diff --git a/doc/source/dev_guide/custom_reader.rst b/doc/source/dev_guide/custom_reader.rst
index 86c7be99aa..cef0cc45c1 100644
--- a/doc/source/dev_guide/custom_reader.rst
+++ b/doc/source/dev_guide/custom_reader.rst
@@ -60,17 +60,24 @@ if needed (ex. goes-imager).
:file format: If the file format of the files is informative to the user or
can distinguish one reader from another then this field should be
specified. Common format names should be abbreviated following existing
- abbreviations like `nc` for NetCDF3 or NetCDF4, `hdf` for HDF4, `h5` for
+ abbreviations like ``nc`` for NetCDF3 or NetCDF4, ``hdf`` for HDF4, ``h5`` for
HDF5.
The existing :ref:`reader's table <reader_table>` can be used for reference.
-When in doubt, reader names can be discussed in the github pull
-request when this reader is added to Satpy or a github issue.
+When in doubt, reader names can be discussed in the GitHub pull
+request when this reader is added to Satpy, or in a GitHub issue.
The YAML file
-------------
-The yaml file is composed of three sections:
+If your reader is going to be part of Satpy, the YAML file should be
+located in the ``satpy/etc/readers`` directory, along with the YAML
+files for all other readers. If you are developing a reader for internal
+purposes (such as for unpublished data), the YAML file should be located
+in any directory in ``$SATPY_CONFIG_PATH`` within the subdirectory
+``readers/`` (see :doc:`../config`).
+
+The YAML file is composed of three sections:
- the :ref:`reader ` section,
that provides basic parameters for the reader
@@ -88,28 +95,37 @@ The ``reader`` section provides basic parameters for the overall reader.
The parameters to provide in this section are:
- - name: This is the name of the reader, it should be the same as the
- filename (without the .yaml extension). The naming convention for
- this is described above in the :ref:`reader_naming` section above.
- - short_name (optional): Human-readable version of the reader 'name'.
- If not provided, applications using this can default to taking the 'name',
- replacing ``_`` with spaces and uppercasing every letter.
- - long_name: Human-readable title for the reader. This may be used as a
- section title on a website or in GUI applications using Satpy. Default
- naming scheme is `` Level []``.
- For example, for the ``abi_l1b`` reader this is ``"GOES-R ABI Level 1b"``
- where "GOES-R" is the name of the program and **not** the name of the
- platform/satellite. This scheme may not work for all readers, but in
- general should be followed. See existing readers for more examples.
- - description: General description of the reader. This may include any
- `restructuredtext `_
- formatted text like links to PDFs or sites with more information on the
- file format. This can be multiline if formatted properly in YAML (see
- example below).
- - sensors: The list of sensors this reader will support. This must be
- all lowercase letters for full support throughout in Satpy.
- - reader: The main python reader class to use, in most cases the
- ``FileYAMLReader`` is a good choice.
+ name
+ This is the name of the reader; it should be the same as the
+ filename (without the .yaml extension). The naming convention for
+ this is described in the :ref:`reader_naming` section above.
+ short_name (optional)
+ Human-readable version of the reader 'name'.
+ If not provided, applications using this can default to taking the 'name',
+ replacing ``_`` with spaces and uppercasing every letter.
+ long_name
+ Human-readable title for the reader. This may be used as a
+ section title on a website or in GUI applications using Satpy. Default
+ naming scheme is ``<program> <sensor> Level <level> [<format>]``.
+ For example, for the ``abi_l1b`` reader this is ``"GOES-R ABI Level 1b"``
+ where "GOES-R" is the name of the program and **not** the name of the
+ platform/satellite. This scheme may not work for all readers, but in
+ general should be followed. See existing readers for more examples.
+ description
+ General description of the reader. This may include any
+ `restructuredtext <https://docutils.sourceforge.io/rst.html>`_
+ formatted text like links to PDFs or sites with more information on the
+ file format. This can be multiline if formatted properly in YAML (see
+ example below).
+ status
+ The status of the reader (one of: Nominal, Beta, Alpha)
+ supports_fsspec
+ If the reader supports reading data via fsspec (either true or false).
+ sensors
+ The list of sensors this reader will support. This must be
+ all lowercase letters for full support throughout in Satpy.
+ reader
+ The main python reader class to use, in most cases the
+ ``FileYAMLReader`` is a good choice.
.. code:: yaml
@@ -122,8 +138,8 @@ The parameters to provide in this section are:
sensors: [seviri]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
-Optionally, if you need to customize the `DataID` for this reader, you can provide the
-relevant keys with a `data_identification_keys` item here. See the :doc:`satpy_internals`
+Optionally, if you need to customize the ``DataID`` for this reader, you can provide the
+relevant keys with a ``data_identification_keys`` item here. See the :doc:`satpy_internals`
section for more information.
.. _custom_reader_file_types_section:
@@ -203,7 +219,7 @@ Parameters you can define for example are:
is optional if the data being read is gridded already. Swath data,
from example data from some polar-orbiting satellites, should have these
defined or no geolocation information will be available when the data
- is loaded. For gridded datasets a `get_area_def` function will be
+ are loaded. For gridded datasets a ``get_area_def`` function will be
implemented in python (see below) to define geolocation information.
- Any other field that is relevant for the reader or could be useful metadata
provided to the user.
@@ -433,7 +449,7 @@ This method is good when you want to:
1. Define datasets dynamically without needing to define them in the YAML.
2. Supplement metadata from the YAML file with information from the file
- content (ex. `resolution`).
+ content (ex. ``resolution``).
3. Determine if a dataset is available by the file contents. This differs from
the default behavior of a dataset being considered loadable if its
"file_type" is loaded.
@@ -479,6 +495,9 @@ needs to implement a few methods:
:meth:`xarray.DataArray.rename` method for more information and its use
in the example below.
+ If the reader should be compatible with opening remote files see
+ :doc:`remote_file_support`.
+
- the ``get_area_def`` method, that takes as single argument the
:class:`~satpy.dataset.DataID` for which we want
the area. It should return a :class:`~pyresample.geometry.AreaDefinition`
@@ -500,6 +519,11 @@ needs to implement a few methods:
On top of that, two attributes need to be defined: ``start_time`` and
``end_time``, that define the start and end times of the sensing.
+See the :ref:`time_metadata` section for a description of the different
+times that Satpy readers typically use and what times should be used
+for the ``start_time`` and ``end_time``. Note that these properties will
+be assigned to the ``start_time`` and ``end_time`` metadata of any DataArrays
+returned by ``get_dataset``; any existing values will be overwritten.
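+
+As a rough illustration, a ``get_dataset`` for a NetCDF-based file handler
+could look like the sketch below. The ``nc`` attribute and the ``file_key``
+metadata key are assumptions for this example, not part of a required
+interface:
+
+.. code-block:: python
+
+    def get_dataset(self, dataset_id, ds_info):
+        # select the file variable configured for this dataset in the YAML
+        data = self.nc[ds_info["file_key"]]
+        # pass the YAML metadata on to the user
+        data.attrs.update(ds_info)
+        return data
+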
If you are writing a file handler for more common formats like HDF4, HDF5, or
NetCDF4 you may want to consider using the utility base classes for each:
@@ -579,4 +603,4 @@ Auxiliary File Download
If your reader needs additional data files to do calibrations, corrections,
or anything else see the :doc:`aux_data` document for more information on
how to download and cache these files without including them in the Satpy
-python package.
\ No newline at end of file
+python package.
diff --git a/doc/source/dev_guide/index.rst b/doc/source/dev_guide/index.rst
index 34006a900b..9cbb1d75ff 100644
--- a/doc/source/dev_guide/index.rst
+++ b/doc/source/dev_guide/index.rst
@@ -14,6 +14,7 @@ at the pages listed below.
CONTRIBUTING
xarray_migration
custom_reader
+ remote_file_support
plugins
satpy_internals
aux_data
@@ -30,6 +31,8 @@ Satpy is now Python 3 only and it is no longer needed to support Python 2.
Check ``setup.py`` for the current Python versions any new code needs
to support.
+.. _devinstall:
+
Development installation
========================
@@ -59,7 +62,7 @@ clone your fork. The package can then be installed in development mode by doing:
The first command will install all dependencies needed by the Satpy
conda-forge package, but won't actually install Satpy. The second command
should be run from the root of the cloned Satpy repository (where the
-`setup.py` is) and will install the actual package.
+``setup.py`` is) and will install the actual package.
You can now edit the python files in your cloned repository and have them
immediately reflected in your conda environment.
@@ -67,12 +70,25 @@ immediately reflected in your conda environment.
Running tests
=============
-Satpy tests are written using the python :mod:`unittest` module and the
-third-party :doc:`pytest ` package. Satpy tests can be executed by
-running::
+Satpy tests are written using the third-party :doc:`pytest `
+package. There is usually no need to run all Satpy tests, but instead only
+run the tests related to the component you are working on. All tests are
+automatically run from the GitHub Pull Request using multiple versions of
+Python, multiple operating systems, and multiple versions of dependency
+libraries. If you want to run all Satpy tests you will need to install
+additional dependencies that aren't needed for regular Satpy usage. To install
+them run::
+
+ pip install -e .[tests]
+
+Satpy tests can be executed by running::
pytest satpy/tests
+You can also run specific tests by specifying a sub-directory or module::
+
+ pytest satpy/tests/reader_tests/test_abi_l1b.py
+
Running benchmarks
==================
diff --git a/doc/source/dev_guide/plugins.rst b/doc/source/dev_guide/plugins.rst
index 99ff3913a7..bce72dabae 100644
--- a/doc/source/dev_guide/plugins.rst
+++ b/doc/source/dev_guide/plugins.rst
@@ -1,34 +1,218 @@
-================================================
- Adding new functionality to Satpy via plugins
-================================================
+===========================
+Extending Satpy via plugins
+===========================
.. warning::
This feature is experimental and being modified without warnings.
For now, it should not be used for anything else than toy examples and
should not be relied on.
-Satpy has the capability of using plugins. At the moment, new composites can be
-added to satpy through external plugins. Plugins for reader and writers may be
-added at a later date (PRs are welcome!).
+Satpy is able to load additional functionality outside of the builtin features
+in the library. It does this by searching a series of configured paths for
+additional configuration files for:
-Here is an
-`example `_ of a
-composites plugin.
+* readers
+* composites and modifiers
+* enhancements
+* writers
-The key is to use the same configuration directory structure as satpy and add
-a `satpy.composites` entry point in the setup.py file of the plugin:
+For basic testing and temporary configuration changes, you can follow
+the instructions in :ref:`component_configuration`. This will tell Satpy
+where to look for your custom YAML configuration files and import any Python
+code you'd like it to use for these components. However, this requires telling
+Satpy of these paths on every execution (either as an environment variable or
+by using ``satpy.config``).
-.. code: python
+Satpy also supports being told this information via setuptools "entry points".
+Once your custom Python package with entry points is installed Satpy will
+automatically discover it when searching for composites without the user
+needing to explicitly import your package. This has the added
+benefit of organizing your YAML configuration files and any custom python code
+into a single python package. How to structure a package in this way is
+described below.
+
+An example project showing the usage of these entry points is available at
+`this github repository `_
+where a custom compositor is created. This repository also includes common
+configuration files and tools for writing clean code and automatically testing
+your python code.
+
+Plugin package structure
+========================
+
+The below sections will use the example package name ``satpy-myplugin``. This
+is only an example and naming a plugin package with a ``satpy-`` prefix is not
+required.
+
+A plugin package should consist of three main parts:
+
+1. ``pyproject.toml`` or ``setup.py``: These files define the metadata and
+ entry points for your package. Only one of them is needed. With only a few
+ exceptions it is recommended to use a ``pyproject.toml``, as this is the
+ modern form of Python package configuration supported by the ``pip``
+ package manager. See below for examples of the contents of this file.
+2. ``mypkg/etc/``: A directory of Satpy-compatible component YAML files. These
+ YAML files should be in ``readers/``, ``composites/``, ``enhancements/``,
+ and ``writers/`` directories. These YAML files must follow the Satpy naming
+ conventions for each component. For example, composites and enhancements
+ allow for sensor-specific configuration files. Other directories can be
+ added in this ``etc`` directory and will be ignored by Satpy. Satpy will
+ collect all available YAML files from all installed plugins and merge them
+ with those builtin to Satpy. The Satpy builtins will be used as a "base"
+ configuration with all external YAML files applied after.
+3. ``mypkg/``: The python package with any custom python code. This code should
+ be based on or at least compatible with Satpy's base classes for each
+ component or use utilities available from Satpy whenever possible.
+
+ * readers: :class:`~satpy.readers.yaml_reader.FileYAMLReader` for any
+ reader subclasses and
+ :class:`~satpy.readers.file_handlers.BaseFileHandler` for any custom file
+ handlers. See :doc:`custom_reader` for more information.
+ * composites and modifiers: :class:`~satpy.composites.CompositeBase` for
+ any generic compositor and :class:`~satpy.composites.GenericCompositor`
+ for any composite that represents an image (RGB, L, etc). For modifiers,
+ use :class:`~satpy.modifiers.ModifierBase`.
+ * enhancements: Although not required, consider using
+ :func:`satpy.enhancements.apply_enhancement`.
+ * writers: :class:`~satpy.writers.Writer`
+
+ Lastly, this directory should be structured like a standard python package.
+ This primarily means a ``mypkg/__init__.py`` file should exist.
+
+pyproject.toml
+--------------
+
+We recommend using a
+`pyproject.toml <https://pip.pypa.io/en/stable/reference/build-system/pyproject-toml/>`_
+file to define the
+metadata and configuration for a python package. With this file it is possible
+to use package building tools to make an installable package. By using a
+special feature called "entry points" we can configure our package so that its
+Satpy features are automatically discovered by Satpy.
+
+A ``pyproject.toml`` file is typically placed in the root of a project
+repository and at the same level as the package (ex. ``satpy_myplugin/``
+directory). An example for a package called ``satpy-myplugin`` with
+custom composites is shown below.
+
+.. code:: toml
+
+ [project]
+ name = "satpy-myplugin"
+ description = "Example Satpy plugin package definition."
+ version = "1.0.0"
+ readme = "README.md"
+ license = {text = "GPL-3.0-or-later"}
+ requires-python = ">=3.8"
+ dependencies = [
+ "satpy",
+ ]
+
+ [tool.setuptools]
+ packages = ["satpy_myplugin"]
+
+ [build-system]
+ requires = ["setuptools", "wheel"]
+ build-backend = "setuptools.build_meta"
+
+ [project.entry-points."satpy.composites"]
+ example_composites = "satpy_myplugin"
+
+This definition uses
+`setuptools <https://setuptools.pypa.io/en/latest/>`_
+to build the resulting package (under ``build-system``). There are other
+alternative tools (like `poetry <https://python-poetry.org/>`_)
+that can be used.
+
+Other custom components like readers and writers can be defined in the same
+package by using additional entry points named ``satpy.readers`` for readers,
+``satpy.writers`` for writers, and ``satpy.enhancements`` for enhancements.
+
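+After installing your plugin package you can check that the entry point is
+visible using only the standard library; this is a quick sketch, not a
+Satpy API:
+
+.. code-block:: python
+
+    from importlib.metadata import entry_points
+
+    try:
+        eps = entry_points(group="satpy.composites")  # Python 3.10+
+    except TypeError:
+        eps = entry_points().get("satpy.composites", [])  # Python <= 3.9
+    print(list(eps))
+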
+Note the difference between the usage of the package name (``satpy-myplugin``)
+which includes a hyphen and the package directory (``satpy_myplugin``) which uses
+an underscore. Your package name does not need to have a separator (hyphen) in
+it, but is used here due to the common practice of naming plugins this way.
+Package directories can't use hyphens as this would be a syntax error when
+trying to import the package. Underscores can't be used in package names as
+this is not allowed by PyPI.
+
+The first ``project`` section in this TOML file specifies metadata about the
+package. This is most important if you plan on distributing your package on
+PyPI or similar package repository. We specify that our package depends on
+``satpy`` so if someone installs it Satpy will automatically be installed.
+The second section, ``tool.setuptools``,
+tells the package building tools (here ``setuptools``) what directory the Python
+code is in. The third section, ``build-system``, says what tool(s) should be
+used for building the package and what extra requirements are needed during
+this build process.
+
+The last section, ``project.entry-points."satpy.composites"``, is the only
+section specific to this package being a Satpy plugin. At the time of writing
+the ``example_composites = "satpy_myplugin"`` portion is not actually used
+by Satpy but is required to properly define the entry point in the plugin
+package. Instead Satpy will assume that a package that defines the
+``satpy.composites`` (or any of the other component types) entry point will
+have an ``etc/`` directory in the root of the package structure. Even so,
+for future compatibility, it is best to use the name of the package directory
+on the right-hand side of the ``=``.
+
+.. warning::
+
+ Due to some limitations in setuptools you must also define a ``setup.py``
+ file in addition to ``pyproject.toml`` if you'd like to use "editable"
+ installations (``pip install -e .``). Once
+ `this setuptools issue `_
+ is resolved this won't be needed. For now this minimal ``setup.py`` will
+ work:
+
+ .. code-block:: python
+
+ from setuptools import setup
+ setup()
+
+**Alternative: setup.py**
+
+If you are more comfortable creating a ``setup.py``-based python package you
+can use ``setup.py`` instead of ``pyproject.toml``. When used for custom
+composites, in a package called ``satpy-myplugin`` it would look something like
+this:
+
+.. code:: python
from setuptools import setup
import os
setup(
- name='satpy_cpe',
+ name='satpy-myplugin',
entry_points={
'satpy.composites': [
- 'example_composites = satpy_cpe',
+ 'example_composites = satpy_myplugin',
],
},
- package_data={'satpy_cpe': [os.path.join('etc', 'composites/*.yaml')]},
+ package_data={'satpy_myplugin': [os.path.join('etc', 'composites/*.yaml')]},
+ install_requires=["satpy"],
)
+
+Note the difference between the usage of the package name (``satpy-myplugin``),
+which includes a hyphen, and the package directory (``satpy_myplugin``), which uses
+an underscore. Your package name does not need to have a separator (hyphen) in
+it, but is used here due to the common practice of naming plugins this way.
+See the ``pyproject.toml`` information above for more information on what each
+of these values means.
+
+Licenses
+--------
+
+Disclaimer: We are not lawyers.
+
+Satpy source code is under the GPLv3 license. This license requires any
+derivative works to also be GPLv3 or GPLv3 compatible. It is our understanding
+that importing a Python module could be considered "linking" that source code
+to your own (thus being a derivative work) and would therefore require your
+code to be licensed with a GPLv3-compatible license. It is currently only
+possible to make a Satpy-compatible plugin without importing Satpy if it
+contains only enhancements. Writers and compositors are possible without
+subclassing, but are likely difficult to implement. Readers are even more
+difficult to implement without using Satpy's base classes and utilities.
+It is also our understanding that if your custom Satpy plugin code is not
+publicly released then it does not need to be GPLv3.
diff --git a/doc/source/dev_guide/remote_file_support.rst b/doc/source/dev_guide/remote_file_support.rst
new file mode 100644
index 0000000000..fa1b4ae125
--- /dev/null
+++ b/doc/source/dev_guide/remote_file_support.rst
@@ -0,0 +1,50 @@
+======================================
+Adding remote file support to a reader
+======================================
+
+.. warning::
+ This feature is currently very new and might improve and change
+ in the future.
+
+Satpy version 0.25.1 added the ability to search for files on remote
+file systems (see :ref:`search_for_files`) and, for supported readers,
+to read data directly from remote filesystems.
+
+To add this feature to a reader, the call to :func:`xarray.open_dataset`
+has to be replaced by the function :func:`~satpy.readers.file_handlers.open_dataset`
+included in Satpy, which handles passing on the filename to be opened, regardless
+of whether it is a local file path or a :class:`~satpy.readers.FSFile` object that
+can wrap :func:`fsspec.open` objects.
+
+To be able to cache the ``open_dataset`` call, which is favourable for remote files,
+it should be separated from the ``get_dataset`` method, which needs to be implemented
+in every reader. This could look like:
+
+.. code-block:: python
+
+    from satpy import CHUNK_SIZE
+    from satpy._compat import cached_property
+    from satpy.readers.file_handlers import BaseFileHandler, open_dataset
+
+    class Reader(BaseFileHandler):
+
+        def __init__(self, filename, filename_info, filetype_info):
+            super().__init__(filename, filename_info, filetype_info)
+
+        @cached_property
+        def nc(self):
+            # open the file once and cache the opened dataset
+            return open_dataset(self.filename, chunks=CHUNK_SIZE)
+
+        def get_dataset(self, dataset_id, ds_info):
+            # access the opened dataset
+            data = self.nc["key"]
+
+
+Any parameters allowed for :func:`xarray.open_dataset` can be passed as
+keywords to :func:`~satpy.readers.file_handlers.open_dataset` if needed.
+
+.. note::
+ It is important to know that for remote files xarray might use a different
+ backend to open the file than for local files (e.g. h5netcdf instead of netcdf4),
+ which might result in some attributes being returned as arrays instead of scalars.
+ This has to be accounted for when accessing attributes in the reader.
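+
+As a usage sketch, a reader with this support can then be given remote files
+wrapped in :class:`~satpy.readers.FSFile` objects; the bucket, path, and
+reader name below are placeholders:
+
+.. code-block:: python
+
+    import fsspec
+    from satpy import Scene
+    from satpy.readers import FSFile
+
+    openfile = fsspec.open("s3://bucket/path/to/file.nc", anon=True)
+    scn = Scene(filenames=[FSFile(openfile)], reader="some_reader")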
diff --git a/doc/source/dev_guide/xarray_migration.rst b/doc/source/dev_guide/xarray_migration.rst
index 065939306c..5a42fabf16 100644
--- a/doc/source/dev_guide/xarray_migration.rst
+++ b/doc/source/dev_guide/xarray_migration.rst
@@ -313,7 +313,6 @@ Helpful functions
- :func:`~dask.array.store`
- :func:`~dask.array.tokenize`
- :func:`~dask.compute`
-- :doc:`delayed`
+- :doc:`dask:delayed`
- :func:`~dask.array.rechunk`
- :attr:`~dask.array.Array.vindex`
-
diff --git a/doc/source/doi_role.py b/doc/source/doi_role.py
index 4b7386f580..b7c64a14ac 100644
--- a/doc/source/doi_role.py
+++ b/doc/source/doi_role.py
@@ -1,22 +1,21 @@
# -*- coding: utf-8 -*-
-"""
- doilinks
- ~~~~~~~~~~~~~~~~~~~
- Extension to add links to DOIs. With this extension you can use e.g.
- :doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will
- create a link to a DOI resolver
- (``https://doi.org/10.1016/S0022-2836(05)80360-2``).
- The link caption will be the raw DOI.
- You can also give an explicit caption, e.g.
- :doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`.
-
- :copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by
- the Sphinx team.
- :license: BSD.
+"""Create sphinx roles for referencing the DOI of a published paper.
+
+Extension to add links to DOIs. With this extension you can use e.g.
+:doi:`10.1016/S0022-2836(05)80360-2` in your documents. This will
+create a link to a DOI resolver
+(``https://doi.org/10.1016/S0022-2836(05)80360-2``).
+The link caption will be the raw DOI.
+You can also give an explicit caption, e.g.
+:doi:`Basic local alignment search tool <10.1016/S0022-2836(05)80360-2>`.
+
+:copyright: Copyright 2015 Jon Lund Steffensen. Based on extlinks by
+ the Sphinx team.
+:license: BSD.
+
"""
from docutils import nodes, utils
-
from sphinx.util.nodes import split_explicit_title
@@ -49,10 +48,10 @@ def arxiv_role(typ, rawtext, text, lineno, inliner, options=None, content=None):
def setup_link_role(app):
- app.add_role('doi', doi_role)
- app.add_role('DOI', doi_role)
- app.add_role('arXiv', arxiv_role)
- app.add_role('arxiv', arxiv_role)
+ app.add_role('doi', doi_role, override=True)
+ app.add_role('DOI', doi_role, override=True)
+ app.add_role('arXiv', arxiv_role, override=True)
+ app.add_role('arxiv', arxiv_role, override=True)
def setup(app):
diff --git a/doc/source/enhancements.rst b/doc/source/enhancements.rst
index d8178e2d23..635f3919b6 100644
--- a/doc/source/enhancements.rst
+++ b/doc/source/enhancements.rst
@@ -27,7 +27,7 @@ on both ends of the scale, but these can be overridden with
method: !!python/name:satpy.enhancements.stretch
kwargs:
stretch: linear
- cutoffs: (0.003, 0.005)
+ cutoffs: [0.003, 0.005]
.. note::
@@ -93,6 +93,67 @@ lookup
colorize
--------
+
+The colorize enhancement can be used to map scaled/calibrated physical values
+to colors. One or several `standard Trollimage color maps`_ may be used as in
+the example here::
+
+ - name: colorize
+ method: !!python/name:satpy.enhancements.colorize
+ kwargs:
+ palettes:
+ - {colors: spectral, min_value: 193.15, max_value: 253.149999}
+ - {colors: greys, min_value: 253.15, max_value: 303.15}
+
+It is also possible to provide your own custom defined color mapping by
+specifying a list of RGB values and the corresponding min and max values
+between which to apply the colors. This is for instance a common use case for
+Sea Surface Temperature (SST) imagery, as in this example with the EUMETSAT
+Ocean and Sea Ice SAF (OSISAF) GHRSST product::
+
+ - name: osisaf_sst
+ method: !!python/name:satpy.enhancements.colorize
+ kwargs:
+ palettes:
+ - colors: [
+ [255, 0, 255],
+ [195, 0, 129],
+ [129, 0, 47],
+ [195, 0, 0],
+ [255, 0, 0],
+ [236, 43, 0],
+ [217, 86, 0],
+ [200, 128, 0],
+ [211, 154, 13],
+ [222, 180, 26],
+ [233, 206, 39],
+ [244, 232, 52],
+ [255.99609375, 255.99609375, 63.22265625],
+ [203.125, 255.99609375, 52.734375],
+ [136.71875, 255.99609375, 27.34375],
+ [0, 255.99609375, 0],
+ [0, 207.47265625, 0],
+ [0, 158.94921875, 0],
+ [0, 110.42578125, 0],
+ [0, 82.8203125, 63.99609375],
+ [0, 55.21484375, 127.9921875],
+ [0, 27.609375, 191.98828125],
+ [0, 0, 255.99609375],
+ [100.390625, 100.390625, 255.99609375],
+ [150.5859375, 150.5859375, 255.99609375]]
+ min_value: 296.55
+ max_value: 273.55
+
+The RGB color values will be interpolated to give a smooth result,
+in contrast to the palettize enhancement described below.
+
+The above examples are just two different ways to apply colors to images with
+Satpy. There is a wealth of other options for how to declare a colormap;
+please see :func:`~satpy.enhancements.create_colormap` for more inspiration.
+
+.. _`standard Trollimage color maps`: https://trollimage.readthedocs.io/en/latest/colormap.html#default-colormaps
+
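+If you want to experiment with a colormap from Python before writing the
+YAML, the Trollimage colormaps can be loaded and rescaled directly (a
+sketch; the temperature range is just an example)::
+
+    from trollimage.colormap import spectral
+
+    cmap = spectral
+    cmap.set_range(193.15, 253.15)  # map the colors to a BT range in kelvin
+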
+
palettize
---------
diff --git a/doc/source/examples/fci_l1c_natural_color.rst b/doc/source/examples/fci_l1c_natural_color.rst
index f7e81ce8b3..016cb889b7 100644
--- a/doc/source/examples/fci_l1c_natural_color.rst
+++ b/doc/source/examples/fci_l1c_natural_color.rst
@@ -11,6 +11,22 @@ to generate a Natural Color RGB composite over the European area.
not work with the currently released version of Satpy. Additional updates
to this example will be coming soon.
+.. note::
+
+ For reading compressed data, a decompression library is
+ needed. Either install the FCIDECOMP library (see the `FCI L1 Product User
+ Guide `_), or the
+ ``hdf5plugin`` package with::
+
+ pip install hdf5plugin
+
+ or::
+
+ conda install hdf5plugin -c conda-forge
+
+ If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin``
+ at the top of your script.
+
.. code-block:: python
from satpy.scene import Scene
@@ -20,7 +36,7 @@ to generate a Natural Color RGB composite over the European area.
path_to_data = 'your/path/to/FCI/data/folder/'
# find files and assign the FCI reader
- files = find_files_and_readers(base_dir=path_to_data, reader='fci_l1c_fdhsi')
+ files = find_files_and_readers(base_dir=path_to_data, reader='fci_l1c_nc')
# create an FCI scene from the selected files
scn = Scene(filenames=files)
@@ -32,7 +48,12 @@ to generate a Natural Color RGB composite over the European area.
print(scn.available_composite_names())
# load the datasets/composites of interest
- scn.load(['natural_color','vis_04'])
+ scn.load(['natural_color', 'vis_04'], upper_right_corner='NE')
+ # note: the data inside the FCI files is stored upside down. The upper_right_corner='NE' argument
+ # flips it automatically into an upright position.
+
+ # you can access the values of a dataset as a Numpy array with
+ vis_04_values = scn['vis_04'].values
# resample the scene to a specified area (e.g. "eurol1" for Europe in 1km resolution)
scn_resampled = scn.resample("eurol", resampler='nearest', radius_of_influence=5000)
diff --git a/doc/source/faq.rst b/doc/source/faq.rst
index ea1b161272..2544132d13 100644
--- a/doc/source/faq.rst
+++ b/doc/source/faq.rst
@@ -27,8 +27,7 @@ workers by doing the following at the **top** of your python code:
.. code-block:: python
import dask
- from multiprocessing.pool import ThreadPool
- dask.config.set(pool=ThreadPool(8))
+ dask.config.set(num_workers=8)
# all other Satpy imports and code
This will limit dask to using 8 workers. Typically numbers between 4 and 8
@@ -130,13 +129,13 @@ control the number of threads used during compression by specifying the
to set this to at least the same number of dask workers you use. Do this by
adding ``num_threads`` to your `save_dataset` or `save_datasets` call::
- scn.save_datasets(base_dir='/tmp', tiled=True, num_threads=8)
+ scn.save_datasets(base_dir='/tmp', num_threads=8)
-Here we're also using the `tiled` option to store our data as "tiles" instead
+By default, Satpy stores the data as "tiles" instead
of "stripes" which is another way to get more efficient compression of our
-GeoTIFF image.
+GeoTIFF image. You can disable this with ``tiled=False``.
See the
`GDAL GeoTIFF documentation `_
for more information on the creation options available including other
-compression choices.
\ No newline at end of file
+compression choices.
diff --git a/doc/source/index.rst b/doc/source/index.rst
index 1dac9bb4b5..713653732d 100644
--- a/doc/source/index.rst
+++ b/doc/source/index.rst
@@ -3,7 +3,7 @@ Satpy's Documentation
=====================
Satpy is a python library for reading, manipulating, and writing data from
-remote-sensing earth-observing meteorological satellite instruments. Satpy
+remote-sensing earth-observing satellite instruments. Satpy
provides users with readers that convert geophysical parameters from various
file formats to the common Xarray :class:`~xarray.DataArray` and
:class:`~xarray.Dataset` classes for easier interoperability with other
@@ -20,17 +20,17 @@ files. Satpy also allows users to resample data to geographic projected grids
The Satpy library acts as a high-level abstraction layer on top of other
libraries maintained by the Pytroll group including:
-- `Pyresample `_
-- `PySpectral `_
-- `Trollimage `_
-- `Pycoast `_
-- `Pydecorate `_
+- `pyresample `_
+- `pyspectral `_
+- `trollimage `_
+- `pycoast `_
+- `pydecorate `_
- `python-geotiepoints `_
- `pyninjotiff `_
Go to the Satpy project_ page for source code and downloads.
-Satpy is designed to be easily extendable to support any meteorological
+Satpy is designed to be easily extendable to support any earth observation
satellite by the creation of plugins (readers, compositors, writers, etc).
The table at the bottom of this page shows the input formats supported by
the base Satpy installation.
@@ -56,6 +56,7 @@ the base Satpy installation.
examples/index
quickstart
readers
+ remote_reading
composites
resample
enhancements
@@ -68,216 +69,13 @@ the base Satpy installation.
Satpy API
faq
+ Release Notes
+ Security Policy
.. _reader_table:
-.. list-table:: Satpy Readers
- :header-rows: 1
- :widths: 45 25 30
+.. include:: reader_table.rst
- * - Description
- - Reader name
- - Status
- * - MSG (Meteosat 8 to 11) SEVIRI data in HRIT format
- - `seviri_l1b_hrit`
- - Nominal
- * - MSG (Meteosat 8 to 11) SEVIRI data in native format
- - `seviri_l1b_native`
- - Nominal.
- * - MSG (Meteosat 8 to 11) SEVIRI data in netCDF format
- - `seviri_l1b_nc`
- - | HRV channel not supported, incomplete metadata
- | in the files. EUMETSAT has been notified.
- * - MSG (Meteosat 8 to 11) L2 products in BUFR format
- - `seviri_l2_bufr`
- - AMV BUFR products not supported yet.
- * - MSG (Meteosat 8 to 11) L2 products in GRIB2 format
- - `seviri_l2_grib`
- - In development, CLM, OCA and FIR products supported
- * - MFG (Meteosat 2 to 7) MVIRI data in netCDF format (FIDUCEO FCDR)
- - `mviri_l1b_fiduceo_nc`
- - Beta
- * - Himawari 8 and 9 AHI data in HSD format
- - `ahi_hsd`
- - Nominal
- * - Himawari 8 and 9 AHI data in HRIT format
- - `ahi_hrit`
- - Nominal
- * - Himawari 8 and 9 AHI data in Gridded binary format,
- from http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html
- - `ahi_l1b_gridded_bin`
- - Nominal
- * - MTSAT-1R JAMI data in JMA HRIT format
- - `jami_hrit`
- - Beta
- * - MTSAT-2 Imager data in JMA HRIT format
- - `mtsat2-imager_hrit`
- - Beta
- * - GOES-R imager data in netcdf format
- - `abi_l1b`
- - Nominal
- * - NOAA GOES-R ABI L2+ products in netcdf format
- - `abi_l2_nc`
- - Beta
- * - GOES 11 to 15 imager data in HRIT format
- - `goes-imager_hrit`
- - Nominal
- * - GOES 8 to 15 imager data in netCDF format (from NOAA CLASS)
- - `goes-imager_nc`
- - Beta
- * - Electro-L N2 MSU-GS data in HRIT format
- - `electrol_hrit`
- - Nominal
- * - NOAA 15 to 19, Metop A to C AVHRR data in AAPP format
- - `avhrr_l1b_aapp`
- - Nominal
- * - Metop A to C AVHRR in native level 1 format
- - `avhrr_l1b_eps`
- - Nominal
- * - Tiros-N, NOAA 7 to 19 AVHRR data in GAC and LAC format
- - `avhrr_l1b_gaclac`
- - Nominal
- * - NOAA 15 to 19 AVHRR data in raw HRPT format
- - `avhrr_l1b_hrpt`
- - In development
- * - GCOM-W1 AMSR2 data in HDF5 format
- - `amsr2_l1b`
- - Nominal
- * - MTG FCI Level 1C data for Full Disk High Spectral Imagery (FDHSI) in netcdf format
- - `fci_l1c_fdhsi`
- - In development
- * - Callipso Caliop Level 2 Cloud Layer data (v3) in EOS-hdf4 format
- - `caliop_l2_cloud`
- - In development
- * - Terra and Aqua MODIS data in EOS-hdf4 level-1 format as produced by IMAPP and IPOPP or downloaded from LAADS
- - `modis_l1b`
- - Nominal
- * - NWCSAF GEO 2016 products in netCDF4 format (limited to SEVIRI)
- - `nwcsaf-geo`
- - In development
- * - NWCSAF PPS 2014, 2018 products in netCDF4 format
- - `nwcsaf-pps_nc`
- - | Not yet support for remapped netCDF products.
- | Only the standard swath based output is supported.
- | CPP products not supported yet
- * - Sentinel-1 A and B SAR-C data in SAFE format
- - `sar-c_safe`
- - Nominal
- * - Sentinel-2 A and B MSI data in SAFE format
- - `msi_safe`
- - Nominal
- * - Sentinel-3 A and B OLCI Level 1B data in netCDF4 format
- - `olci_l1b`
- - Nominal
- * - Sentinel-3 A and B OLCI Level 2 data in netCDF4 format
- - `olci_l2`
- - Nominal
- * - Sentinel-3 A and B SLSTR data in netCDF4 format
- - `slstr_l1b`
- - In development
- * - OSISAF SST data in GHRSST (netcdf) format
- - `ghrsst_l3c_sst`
- - In development
- * - NUCAPS EDR Retrieval in NetCDF4 format
- - `nucaps`
- - Nominal
- * - NOAA Level 2 ACSPO SST data in netCDF4 format
- - `acspo`
- - Nominal
- * - GEOstationary Cloud Algorithm Test-bed (GEOCAT)
- - `geocat`
- - Nominal
- * - The Clouds from AVHRR Extended (CLAVR-x)
- - `clavrx`
- - Nominal
- * - SNPP VIIRS data in HDF5 SDR format
- - `viirs_sdr`
- - Nominal
- * - SNPP VIIRS data in netCDF4 L1B format
- - `viirs_l1b`
- - Nominal
- * - SNPP VIIRS SDR data in HDF5 Compact format
- - `viirs_compact`
- - Nominal
- * - AAPP MAIA VIIRS and AVHRR products in hdf5 format
- - `maia`
- - Nominal
- * - VIIRS EDR Active Fires data in NetCDF4 & CSV .txt format
- - `viirs_edr_active_fires`
- - Beta
- * - VIIRS EDR Flood data in hdf4 format
- - `viirs_edr_flood`
- - Beta
- * - GRIB2 format
- - `grib`
- - Beta
- * - SCMI ABI L1B format
- - `abi_l1b_scmi`
- - Beta
- * - VIRR data in HDF5 format
- - `virr_l1b`
- - Beta
- * - MERSI-2 L1B data in HDF5 format
- - `mersi2_l1b`
- - Beta
- * - FY-4A AGRI L1 data in HDF5 format
- - `agri_l1`
- - Beta
- * - Vaisala Global Lightning Dataset GLD360 data in ASCII format
- - `vaisala_gld360`
- - Beta
- * - TROPOMI L2 data in NetCDF4 format
- - `tropomi_l2`
- - Beta
- * - Hydrology SAF products in GRIB format
- - `hsaf_grib`
- - | Beta
- | Only the h03, h03b, h05 and h05B products are supported at-present
- * - GEO-KOMPSAT-2 AMI L1B data in NetCDF4 format
- - `ami_l1b`
- - Beta
- * - GOES-R GLM Grided Level 2 in NetCDF4 format
- - `glm_l2`
- - Beta
- * - Sentinel-3 SLSTR SST data in NetCDF4 format
- - `slstr_l2`
- - Beta
- * - IASI level 2 SO2 in BUFR format
- - `iasi_l2_so2_bufr`
- - Beta
- * - HY-2B Scatterometer level 2b data in HDF5 format
- - `hy2_scat_l2b_h5`
- - Beta
- * - OMPS EDR data in HDF5 format
- - `omps_edr`
- - Beta
- * - VII Level 2 in NetCDF4 format
- - `vii_l2_nc`
- - Beta
- * - VII Level 1b in NetCDF4 format
- - `vii_l1b_nc`
- - Beta
- * - MTG FCI Level 2 in NetCDF4 format
- - `fci_l2_nc`
- - Beta
- * - SMOS level 2 wind data in NetCDF4 format
- - `smos_l2_wind`
- - Beta
- * - AMSR2 level 2 wind data in HDF5 format
- - `amsr2_l2`
- - Beta
- * - GPM IMERG level 3 precipitation data in HDF5 format
- - `gpm_imerg`
- - Nominal
- * - AMSR2 level 2 GAASP in NetCDF4 format
- - `amsr2_l2_gaasp`
- - Beta
- * - MiRS level 2 Precipitation and Surface Products (IMG) in NetCDF4 format
- - `mirs`
- - Beta
- * - MIMIC Total Precipitable Water Product Reader in NetCDF format
- - mimicTPW2_comp
- - Beta
Indices and tables
==================
diff --git a/doc/source/install.rst b/doc/source/install.rst
index 4c0a5f3ff2..3c3ba26a41 100644
--- a/doc/source/install.rst
+++ b/doc/source/install.rst
@@ -2,6 +2,75 @@
Installation Instructions
=========================
+Satpy is available from conda-forge (via conda), PyPI (via pip), or from
+source (via pip+git). The below instructions show how to install stable
+versions of Satpy. For a development/unstable version see :ref:`devinstall`.
+
+Conda-based Installation
+========================
+
+Satpy can be installed into a conda environment by installing the package
+from the conda-forge channel. If you do not already have access to a conda
+installation, we recommend installing
+`miniconda <https://docs.conda.io/en/latest/miniconda.html>`_ for the smallest
+and easiest installation.
+
+The commands below will use ``-c conda-forge`` to make sure packages are
+downloaded from the conda-forge channel. Alternatively, you can tell conda
+to always use conda-forge by running:
+
+.. code-block:: bash
+
+ $ conda config --add channels conda-forge
+
+In a new conda environment
+--------------------------
+
+We recommend creating a separate environment for your work with Satpy. To
+create a new environment and install Satpy all in one command you can
+run:
+
+
+.. code-block:: bash
+
+ $ conda create -c conda-forge -n my_satpy_env python satpy
+
+You must then activate the environment so any future python or
+conda commands will use this environment.
+
+.. code-block::
+
+ $ conda activate my_satpy_env
+
+This method of creating an environment with Satpy (and optionally other
+packages) installed can generally be created faster than creating an
+environment and then later installing Satpy and other packages (see the
+section below).
+
+In an existing environment
+--------------------------
+
+.. note::
+
+ It is recommended that when first exploring Satpy, you create a new
+ environment specifically for this rather than modifying one used for
+ other work.
+
+If you already have a conda environment, it is activated, and would like to
+install Satpy into it, run the following:
+
+.. code-block:: bash
+
+ $ conda install -c conda-forge satpy
+
+.. note::
+
+ Satpy only automatically installs the dependencies needed to process the
+ most common use cases. Additional dependencies may need to be installed
+ with conda or pip if import errors are encountered. To check your
+ installation use the ``check_satpy`` function discussed
+ :ref:`here `.
+
Pip-based Installation
======================
@@ -31,42 +100,6 @@ dependencies:
$ pip install "satpy[all]"
-Conda-based Installation
-========================
-
-Satpy is available from the conda-forge channel. If
-you have not configured your conda environment to search conda-forge already
-then do:
-
-.. code-block:: bash
-
- $ conda config --add channels conda-forge
-
-We recommend creating a separate environment for your work with Satpy. If
-you haven't created and activated one already, you can by running:
-
-.. code-block:: bash
-
- $ conda create -n my_satpy_env python
- $ conda activate my_satpy_env
-
-The above will create a new environment with the latest version of Python
-installed along with Satpy and all of its dependencies. The second command
-will activate the environment so all future conda or python commands will
-use this new environment.
-
-Next to install Satpy into an existing activated environment run:
-
-.. code-block:: bash
-
- $ conda install satpy
-
-.. note::
-
- Satpy only automatically installs the dependencies needed to process the
- most common use cases. Additional dependencies may need to be installed
- with conda or pip if import errors are encountered.
-
Ubuntu System Python Installation
=================================
@@ -85,5 +118,3 @@ created.
$ virtualenv /path/to/pytroll-env
$ source /path/to/pytroll-env/bin/activate
$ pip install satpy
-
-
diff --git a/doc/source/modifiers.rst b/doc/source/modifiers.rst
new file mode 100644
index 0000000000..4869fe9091
--- /dev/null
+++ b/doc/source/modifiers.rst
@@ -0,0 +1,138 @@
+Modifiers
+=========
+
+Modifiers are filters applied to datasets prior to computing composites.
+They take at least one input (a dataset) and have exactly one output
+(the same dataset, modified). They can take additional input datasets
+or parameters.
+
+Modifiers are defined in composites files in ``etc/composites`` within
+``$SATPY_CONFIG_PATH``.
+
+The instruction to use a certain modifier can be contained in a composite
+definition or in a reader definition. If it is defined in a composite
+definition, it is applied upon constructing the composite.
+
+When using built-in composites, Satpy users do not need to understand
+the mechanics of modifiers, as they are applied automatically.
+The :doc:`composites` documentation contains information on how to apply
+modifiers when creating new composites.
+
+Some readers read data where certain modifiers are already applied. Here,
+the reader definition will refer to the Satpy modifier. This marking
+adds the modifier to the metadata to prevent it from being applied again
+upon composite calculation.
+
+Commonly used modifiers are listed in the table below. Further details
+on those modifiers can be found in the linked API documentation.
+
+.. list-table:: Commonly used modifiers
+ :header-rows: 1
+
+ * - Label
+ - Class
+ - Description
+ * - ``sunz_corrected``
+ - :class:`~satpy.modifiers.geometry.SunZenithCorrector`
+ - Modifies solar channels for the solar zenith angle to provide
+ smoother images.
+ * - ``effective_solar_pathlength_corrected``
+ - :class:`~satpy.modifiers.geometry.EffectiveSolarPathLengthCorrector`
+ - Modifies solar channels for atmospheric path length of solar radiation.
+ * - ``nir_reflectance``
+ - :class:`~satpy.modifiers.spectral.NIRReflectance`
+ - Calculates reflective part of channels at the edge of solar and
+ terrestrial radiation (3.7 µm or 3.9 µm).
+ * - ``nir_emissive``
+ - :class:`~satpy.modifiers.spectral.NIREmissivePartFromReflectance`
+ - Calculates emissive part of channels at the edge of solar and terrestrial
+ radiation (3.7 µm or 3.9 µm)
+ * - ``rayleigh_corrected``
+ - :class:`~satpy.modifiers.atmosphere.PSPRayleighReflectance`
+ - Modifies solar channels to filter out the visual impact of rayleigh
+ scattering.
+
+A complete list can be found in the `etc/composites
+<https://github.com/pytroll/satpy/tree/main/satpy/etc/composites>`_
+source code and in the :mod:`~satpy.modifiers` module documentation.
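+
+As a sketch of explicitly requesting a modified channel when loading data
+(the ``filenames`` list and the chosen reader are assumptions)::
+
+    >>> from satpy import Scene
+    >>> from satpy.dataset import DataQuery
+    >>> scn = Scene(filenames=filenames, reader="seviri_l1b_native")
+    >>> scn.load([DataQuery(name="VIS006", modifiers=("sunz_corrected",))])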
+
+Parallax correction
+-------------------
+
+.. warning::
+
+ The Satpy parallax correction is experimental and subject to change.
+
+Since version 0.37 (mid 2022), Satpy has included a
+modifier for parallax correction, implemented in the
+:class:`~satpy.modifiers.parallax.ParallaxCorrectionModifier` class.
+This modifier is important for some applications, but not applied
+by default to any Satpy datasets or composites, because it can be
+applied to any input dataset and used with any source of (cloud top)
+height. Therefore, users wishing to apply the parallax correction
+semi-automagically have to define their own modifier and then apply
+that modifier for their datasets. An example is included
+with the :class:`~satpy.modifiers.parallax.ParallaxCorrectionModifier`
+API documentation. Note that Satpy cannot apply modifiers to
+composites, so users wishing to apply parallax correction to a composite
+will have to use a lower level API or duplicate an existing composite
+recipe to use modified inputs.
+
+The parallax correction is directly calculated from the cloud top height.
+Information on satellite position is obtained from cloud top height
+metadata. If no orbital parameters are present in the cloud top height
+metadata, Satpy will attempt to calculate orbital parameters from the
+platform name and start time. The backup calculation requires skyfield
+and astropy to be installed. If the metadata include neither orbital
+parameters nor platform name and start time, parallax calculation will
+fail. Because the cloud top height metadata are used, it is essential
+that the cloud top height data are derived from the same platform as
+the measurements to be corrected.
+
+The parallax error moves clouds away from the observer. Therefore, the
+parallax correction shifts clouds in the direction of the observer. The
+space left behind by the cloud will be filled with fill values. As the
+cloud is shifted toward the observer, it may occupy fewer pixels than before,
+because pixels closer to the observer have a smaller surface area. It can
+also be deformed (a "rectangular" cloud may get the shape of a parallelogram).
+
+.. figure:: https://figshare.com/ndownloader/files/36422616/preview/36422616/preview.jpg
+ :width: 512
+ :height: 512
+ :alt: Satellite image without parallax correction.
+
+ SEVIRI view of southern Sweden, 2021-11-30 12:15Z, without parallax correction.
+ This is the ``natural_color`` composite as built into Satpy.
+
+
+.. figure:: https://figshare.com/ndownloader/files/36422613/preview/36422613/preview.jpg
+ :width: 512
+ :height: 512
+ :alt: Satellite image with parallax correction.
+
+ The same satellite view with parallax correction. The most obvious changes
+ are the gaps left behind by the parallax correction, shown as black pixels.
+ Otherwise it shows that clouds have "moved" south-south-west in the direction
+ of the satellite. To view the images side-by-side or alternating, see
+ `the figshare page `_.
+
+The utility function :func:`~satpy.modifiers.parallax.get_surface_parallax_displacement`
+can be used to calculate the magnitude of the parallax error. For a cloud with a cloud
+top height of 10 km:
+
+.. figure:: https://figshare.com/ndownloader/files/36462435/preview/36462435/preview.jpg
+ :width: 512
+ :height: 512
+ :alt: Figure showing magnitude of parallax effect.
+
+ Magnitude of the parallax error for a fictitious cloud with a cloud top
+ height of 10 km for the GOES-East (GOES-16) full disc.
+
+As noted in the warning above, the parallax correction is experimental.
+Although it is covered by tests, there may be cases that yield unexpected
+or incorrect results. It does not yet perform any checks that the
+provided (cloud top) height covers the area of the dataset for which
+the parallax correction shall be applied.
+
+For more general background information and web routines related to the
+parallax effect, see also `this collection at the CIMSS website `_.
+
+.. versionadded:: 0.37
diff --git a/doc/source/multiscene.rst b/doc/source/multiscene.rst
index 0de8cf1cea..8a7be6b8aa 100644
--- a/doc/source/multiscene.rst
+++ b/doc/source/multiscene.rst
@@ -174,7 +174,7 @@ This will compute one video frame (image) at a time and write it to the MPEG-4
video file. For users with more powerful systems it is possible to use
the ``client`` and ``batch_size`` keyword arguments to compute multiple frames
in parallel using the dask ``distributed`` library (if installed).
-See the :doc:`dask distributed ` documentation
+See the :doc:`dask distributed ` documentation
for information on creating a ``Client`` object. If working on a cluster
you may want to use :doc:`dask jobqueue ` to take advantage
of multiple nodes at a time.
diff --git a/doc/source/quickstart.rst b/doc/source/quickstart.rst
index afe46ac3e0..9f9885a750 100644
--- a/doc/source/quickstart.rst
+++ b/doc/source/quickstart.rst
@@ -26,7 +26,7 @@ To load data from the files use the :meth:`Scene.load `
method. Printing the Scene object will list each of the
:class:`xarray.DataArray` objects currently loaded:
- >>> global_scene.load([0.6, 0.8, 10.8])
+ >>> global_scene.load(['0.8', '1.6', '10.8'])
>>> print(global_scene)
dask.array
@@ -34,14 +34,12 @@ method. Printing the Scene object will list each of the
* x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ...
* y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ...
Attributes:
- satellite_longitude: 0.0
+ orbital_parameters: {'projection_longitude': 0.0, 'pr...
sensor: seviri
- satellite_altitude: 35785831.0
platform_name: Meteosat-11
standard_name: brightness_temperature
units: K
wavelength: (9.8, 10.8, 11.8)
- satellite_latitude: 0.0
start_time: 2018-02-28 15:00:10.814000
end_time: 2018-02-28 15:12:43.956000
area: Area ID: some_area_name\nDescription: On-the-fly ar...
@@ -58,14 +56,12 @@ method. Printing the Scene object will list each of the
* x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ...
* y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ...
Attributes:
- satellite_longitude: 0.0
+ orbital_parameters: {'projection_longitude': 0.0, 'pr...
sensor: seviri
- satellite_altitude: 35785831.0
platform_name: Meteosat-11
standard_name: toa_bidirectional_reflectance
units: %
wavelength: (0.74, 0.81, 0.88)
- satellite_latitude: 0.0
start_time: 2018-02-28 15:00:10.814000
end_time: 2018-02-28 15:12:43.956000
area: Area ID: some_area_name\nDescription: On-the-fly ar...
@@ -82,14 +78,12 @@ method. Printing the Scene object will list each of the
* x (x) float64 5.567e+06 5.564e+06 5.561e+06 5.558e+06 5.555e+06 ...
* y (y) float64 -5.567e+06 -5.564e+06 -5.561e+06 -5.558e+06 ...
Attributes:
- satellite_longitude: 0.0
+ orbital_parameters: {'projection_longitude': 0.0, 'pr...
sensor: seviri
- satellite_altitude: 35785831.0
platform_name: Meteosat-11
standard_name: toa_bidirectional_reflectance
units: %
- wavelength: (0.56, 0.635, 0.71)
- satellite_latitude: 0.0
+ wavelength: (1.5, 1.64, 1.78)
start_time: 2018-02-28 15:00:10.814000
end_time: 2018-02-28 15:12:43.956000
area: Area ID: some_area_name\nDescription: On-the-fly ar...
@@ -103,7 +97,7 @@ method. Printing the Scene object will list each of the
Satpy allows loading file data by wavelengths in micrometers (shown above) or by channel name::
- >>> global_scene.load(["VIS006", "VIS008", "IR_108"])
+ >>> global_scene.load(["VIS008", "IR_016", "IR_108"])
To have a look at the available channels for loading from your :class:`~satpy.scene.Scene` object use the
:meth:`~satpy.scene.Scene.available_dataset_names` method:
@@ -125,7 +119,7 @@ To have a look at the available channels for loading from your :class:`~satpy.sc
To access the loaded data use the wavelength or name:
- >>> print(global_scene[0.6])
+ >>> print(global_scene[0.8])
For more information on loading datasets by resolution, calibration, or other
advanced loading methods see the :doc:`readers` documentation.
@@ -136,44 +130,45 @@ Calculating measurement values and navigation coordinates
Once loaded, measurement values can be calculated from a DataArray within a scene, using .values to get a fully calculated numpy array:
- >>> vis006 = global_scene["VIS006"]
- >>> vis006_meas = vis006.values
+ >>> vis008 = global_scene["VIS008"]
+ >>> vis008_meas = vis008.values
Note that for very large images, such as half-kilometer geostationary imagery, calculated measurement arrays may require multiple gigabytes of memory; using deferred computation and/or subsetting of datasets may be preferred in such cases.
The 'area' attribute of the DataArray, if present, can be converted to latitude and longitude arrays. For some instruments (typically polar-orbiters), get_lonlats() may result in arrays needing an additional .compute() or .values extraction.
- >>> vis006_lon, vis006_lat = vis006.attrs['area'].get_lonlats()
+ >>> vis008_lon, vis008_lat = vis008.attrs['area'].get_lonlats()
-Visualizing data
-================
+Visualizing data
+================
+
+To visualize loaded data in a pop-up window:
+
+ >>> global_scene.show(0.8)
-To visualize loaded data in a pop-up window:
-
- >>> global_scene.show(0.6)
-
Alternatively if working in a Jupyter notebook the scene can be converted to
-a `geoviews `_ object using the
+a `geoviews `_ object using the
:meth:`~satpy.scene.Scene.to_geoviews` method. The geoviews package is not a
requirement of the base satpy install, so in order to use this feature the user
needs to install the geoviews package themselves.
-
- >>> import holoviews as hv
- >>> import geoviews as gv
- >>> import geoviews.feature as gf
- >>> gv.extension("bokeh", "matplotlib")
- >>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False]
- >>> %opts Image QuadMesh (cmap='RdBu_r')
+
+ >>> import holoviews as hv
+ >>> import geoviews as gv
+ >>> import geoviews.feature as gf
+ >>> gv.extension("bokeh", "matplotlib")
+ >>> %opts QuadMesh Image [width=600 height=400 colorbar=True] Feature [apply_ranges=False]
+ >>> %opts Image QuadMesh (cmap='RdBu_r')
>>> gview = global_scene.to_geoviews(vdims=[0.8])
- >>> gview[::5,::5] * gf.coastline * gf.borders
-
-Creating new datasets
-=====================
+ >>> gview[::5,::5] * gf.coastline * gf.borders
+
+Creating new datasets
+=====================
Calculations based on loaded datasets/channels can easily be assigned to a new dataset:
- >>> global_scene["ndvi"] = (global_scene[0.8] - global_scene[0.6]) / (global_scene[0.8] + global_scene[0.6])
+ >>> global_scene.load(['VIS006', 'VIS008'])
+ >>> global_scene["ndvi"] = (global_scene['VIS008'] - global_scene['VIS006']) / (global_scene['VIS008'] + global_scene['VIS006'])
>>> global_scene.show("ndvi")
When doing calculations Xarray, by default, will drop all attributes so attributes need to be
@@ -181,9 +176,9 @@ copied over by hand. The :func:`~satpy.dataset.combine_metadata` function can as
Assigning additional custom metadata is also possible.
>>> from satpy.dataset import combine_metadata
- >>> scene['new_band'] = scene[0.8] / scene[0.6]
- >>> scene['new_band'].attrs = combine_metadata(scene[0.8], scene[0.6])
- >>> scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want'
+ >>> scene['new_band'] = scene['VIS008'] / scene['VIS006']
+ >>> scene['new_band'].attrs = combine_metadata(scene['VIS008'], scene['VIS006'])
+ >>> scene['new_band'].attrs['some_other_key'] = 'whatever_value_you_want'
Generating composites
=====================
@@ -281,6 +276,7 @@ To subset multi-resolution data consistently, use the :meth:`~satpy.scene.Scene.
>>> vis006_llbox_lon, vis006_llbox_lat = vis006_llbox.attrs['area'].get_lonlats()
+.. _troubleshooting:
Troubleshooting
===============
diff --git a/doc/source/reader_table.py b/doc/source/reader_table.py
new file mode 100644
index 0000000000..1c6760a390
--- /dev/null
+++ b/doc/source/reader_table.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# satpy is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with satpy. If not, see .
+"""Module for autogenerating reader table from config files."""
+
+from yaml import BaseLoader
+
+from satpy.readers import available_readers
+
+
+def rst_table_row(columns=None):
+ """Create one row for a rst table.
+
+ Args:
+ columns (list[str]): Content of each column.
+ Returns:
+ str
+ """
+ row = " * - {}\n".format(columns[0])
+ columns = [" - {}\n".format(col) for col in columns[1:]]
+ row = row + "".join(columns)
+
+ return row
+
+
+def rst_table_header(name=None, header=None, header_rows=1, widths="auto"):
+ """Create header for rst table.
+
+ Args:
+ name (str): Name of the table
+ header (list[str]): Column names
+ header_rows (int): Number of header rows
+ widths (optional[list[int]]): Width of each column as a list. If not specified,
+ defaults to auto and will therefore be determined by the backend
+ (see )
+ Returns:
+ str
+ """
+ if isinstance(widths, list):
+ widths = " ".join([str(w) for w in widths])
+
+ header = rst_table_row(header)
+
+ table_header = (f".. list-table:: {name}\n"
+ f" :header-rows: {header_rows}\n"
+ f" :widths: {widths}\n"
+ f" :class: datatable\n\n"
+ f"{header}")
+
+ return table_header
+
+
+def generate_reader_table():
+ """Create reader table from reader yaml config files.
+
+ Returns:
+ str
+ """
+ table = [rst_table_header("Satpy Readers", header=["Description", "Reader name", "Status", "fsspec support"],
+ widths=[45, 25, 30, 30])]
+
+ reader_configs = available_readers(as_dict=True, yaml_loader=BaseLoader)
+ for rc in reader_configs:
+ table.append(rst_table_row([rc.get("long_name", "").rstrip("\n"), rc.get("name", ""),
+ rc.get("status", ""), rc.get("supports_fsspec", "false")]))
+
+ return "".join(table)
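+
+
+if __name__ == "__main__":
+    # Convenience entry point for previewing the generated table locally;
+    # the documentation build is assumed to import this module and call
+    # generate_reader_table() from the Sphinx configuration instead.
+    print(generate_reader_table())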
diff --git a/doc/source/readers.rst b/doc/source/readers.rst
index f96a56e518..fa7cfecea1 100644
--- a/doc/source/readers.rst
+++ b/doc/source/readers.rst
@@ -15,9 +15,10 @@ requested, or added to a Scene object.
Available Readers
=================
-To get a list of available readers use the `available_readers` function. By default,
-it returns the names of available readers. To return additional reader information
-use `available_readers(as_dict=True)`::
+For readers currently available in Satpy see :ref:`reader_table`.
+Additionally, to get a list of available readers you can use the `available_readers`
+function. By default, it returns the names of available readers.
+To return additional reader information use `available_readers(as_dict=True)`::
>>> from satpy import available_readers
>>> available_readers()
@@ -79,13 +80,13 @@ loading datasets::
>>> scn.load([0.6, 10.8], pad_data=False)
-For geostationary products, where the imagery is stored in the files in a flipped orientation
-(e.g. MSG SEVIRI L1.5 data which is flipped upside-down and left-right), the keyword argument
+For geostationary products, where the imagery is stored in the files in an unconventional orientation
+(e.g. MSG SEVIRI L1.5 data are stored with the southwest corner in the upper right), the keyword argument
``upper_right_corner`` can be passed into the load call to automatically flip the datasets to the
wished orientation. Accepted argument values are ``'NE'``, ``'NW'``, ``'SE'``, ``'SW'``,
and ``'native'``.
By default, no flipping is applied (corresponding to ``upper_right_corner='native'``) and
-the data is delivered in the original format. To get the data in the common upright orientation,
+the data are delivered in the original format. To get the data in the common upright orientation,
load the datasets using e.g.::
>>> scn.load(['VIS008'], upper_right_corner='NE')
@@ -108,8 +109,37 @@ names of Datasets::
>>> scn.available_dataset_names()
-Search for local files
-======================
+Load remote data
+================
+
+Starting with Satpy version 0.25.1, supported readers can load data from
+remote file systems through ``fsspec`` and protocol-specific packages such as ``s3fs``.
+For example:
+
+::
+
+ >>> from satpy import Scene
+ >>> from satpy.readers import FSFile
+ >>> import fsspec
+
+ >>> filename = 'noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*'
+
+ >>> the_files = fsspec.open_files("simplecache::s3://" + filename, s3={'anon': True})
+
+ >>> fs_files = [FSFile(open_file) for open_file in the_files]
+
+ >>> scn = Scene(filenames=fs_files, reader='abi_l1b')
+ >>> scn.load(['true_color_raw'])
+
+Check the list of :ref:`reader_table` to see which reader supports remote
+files. For the usage of ``fsspec`` and advanced features like caching files
+locally see the `fsspec Documentation `_ .
+
+
+.. _search_for_files:
+
+Search for local/remote files
+=============================
Satpy provides a utility
:func:`~satpy.readers.find_files_and_readers` for searching for files in
@@ -129,56 +159,109 @@ the :class:`~satpy.scene.Scene` initialization.
>>> scn = Scene(filenames=my_files)
See the :func:`~satpy.readers.find_files_and_readers` documentation for
-more information on the possible parameters.
+more information on the possible parameters as well as for searching on
+remote file systems.
+
+.. _dataset_metadata:
Metadata
========
-.. _dataset_metadata:
-
The datasets held by a scene also provide vital metadata such as dataset name, units, observation
time etc. The following attributes are standardized across all readers:
* ``name``, and other identifying metadata keys: See :doc:`dev_guide/satpy_internals`.
* ``start_time``: Left boundary of the time interval covered by the dataset.
+ For more information see the :ref:`time_metadata` section below.
* ``end_time``: Right boundary of the time interval covered by the dataset.
+ For more information see the :ref:`time_metadata` section below.
* ``area``: :class:`~pyresample.geometry.AreaDefinition` or
:class:`~pyresample.geometry.SwathDefinition` if data is geolocated. Areas are used for gridded
projected data and Swaths when data must be described by individual longitude/latitude
coordinates. See the Coordinates section below.
+* ``reader``: The name of the Satpy reader that produced the dataset.
* ``orbital_parameters``: Dictionary of orbital parameters describing the satellite's position.
+ See the :ref:`orbital_parameters` section below for more information.
+* ``time_parameters``: Dictionary of additional time parameters describing the
+ time ranges related to the requests or schedules for when observations
+ should happen and when they actually do. See :ref:`time_metadata` below for
+ details.
+* ``raw_metadata``: Raw, unprocessed metadata from the reader.
- * For *geostationary* satellites it is described using the following scalar attributes:
-
- * ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the
- time of observation in geodetic coordinates (i.e. altitude is relative and normal to the
- surface of the ellipsoid).
- * ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a
- confined area in which the satellite is actively maintained in using maneuvres). Inbetween
- major maneuvres, when the satellite is permanently moved, the nominal position is constant.
- * ``nadir_longitude/latitude``: Intersection of the instrument's Nadir with the surface of the
- earth. May differ from the actual satellite position, if the instrument is pointing slightly
- off the axis (satellite, earth-center). If available, this should be used to compute viewing
- angles etc. Otherwise, use the actual satellite position.
- * ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This
- should be used to compute lat/lon coordinates. Note that the projection center can differ
- considerably from the actual satellite position. For example MSG-1 was at times positioned
- at 3.4 degrees west, while the image data was re-projected to 0 degrees.
- * [DEPRECATED] ``satellite_longitude/latitude/altitude``: Current position of the satellite at
- the time of observation in geodetic coordinates.
-
- .. note:: Longitudes and latitudes are given in degrees, altitude in meters. For use in
- pyorbital, the altitude has to be converted to kilometers, see for example
- :func:`pyorbital.orbital.get_observer_look`.
+Note that the above attributes are not necessarily available for each dataset.
- * For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of
- the swath as ancillary datasets. Additional metadata related to the satellite position include:
+.. _time_metadata:
+
+Time Metadata
+-------------
+
+In addition to the generic ``start_time`` and ``end_time`` pieces of metadata
+there are other time fields that may be provided if the reader supports them.
+These items are stored in a ``time_parameters`` sub-dictionary and they include
+values like:
+
+* ``observation_start_time``: The point in time when a sensor began recording
+ for the current data.
+* ``observation_end_time``: Same as ``observation_start_time``, but the time
+ when the sensor stopped recording.
+* ``nominal_start_time``: The "human friendly" time describing the start of
+ the data observation interval or repeat cycle. This time is often on a round
+ minute (seconds=0). Along with the nominal end time, these times define the
+ regular interval of the data collection. For example, GOES-16 ABI full disk
+ images are collected every 10 minutes (in the common configuration) so
+ ``nominal_start_time`` and ``nominal_end_time`` would be 10 minutes apart
+ regardless of when the instrument recorded data inside that interval.
+ This time may also be referred to as the repeat cycle, repeat slot, or time
+ slot.
+* ``nominal_end_time``: Same as ``nominal_start_time``, but the end of the
+ interval.
+
+In general, ``start_time`` and ``end_time`` will be set to the "nominal"
+time by the reader. This ensures that other Satpy components get a
+consistent time for calculations (ex. generation of solar zenith angles)
+and can be reused between bands.
+
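+As an illustration, these values can be inspected on a loaded dataset
+(a sketch; which keys are present depends on the reader)::
+
+    >>> data_arr = scn["IR_108"]
+    >>> data_arr.attrs["start_time"]
+    >>> data_arr.attrs["time_parameters"]["observation_start_time"]
+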
+See the :ref:`data_array_coordinates` section below for more information on
+time information that may show up as a per-element/row "coordinate" on the
+DataArray (ex. acquisition time) instead of as metadata.
+
+.. _orbital_parameters:
+
+Orbital Parameters
+------------------
+
+Orbital parameters describe the position of the satellite. As such they
+typically come in a few "flavors" for the common types of orbits a satellite
+may have.
+
+For *geostationary* satellites it is described using the following scalar attributes:
+
+ * ``satellite_actual_longitude/latitude/altitude``: Current position of the satellite at the
+ time of observation in geodetic coordinates (i.e. altitude is relative and normal to the
+ surface of the ellipsoid). The longitude and latitude are given in degrees, the altitude in meters.
+ * ``satellite_nominal_longitude/latitude/altitude``: Center of the station keeping box (a
+ confined area in which the satellite is actively maintained using maneuvers). In between
+ major maneuvers, in which the satellite is moved to a new permanent position, the nominal position is constant.
+ The longitude and latitude are given in degrees, the altitude in meters.
+ * ``nadir_longitude/latitude``: Intersection of the instrument's Nadir with the surface of the
+ earth. May differ from the actual satellite position, if the instrument is pointing slightly
+ off the axis (satellite, earth-center). If available, this should be used to compute viewing
+ angles etc. Otherwise, use the actual satellite position. The values are given in degrees.
+ * ``projection_longitude/latitude/altitude``: Projection center of the re-projected data. This
+ should be used to compute lat/lon coordinates. Note that the projection center can differ
+ considerably from the actual satellite position. For example MSG-1 was at times positioned
+ at 3.4 degrees west, while the image data was re-projected to 0 degrees.
+ The longitude and latitude are given in degrees, the altitude in meters.
+
+ .. note:: For use in pyorbital, the altitude has to be converted to kilometers, see for example
+ :func:`pyorbital.orbital.get_observer_look`.
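+
+As an illustration, the nominal sub-satellite position of a geostationary
+dataset can be read from these attributes (a sketch; key availability
+depends on the reader)::
+
+    >>> orb = data_arr.attrs["orbital_parameters"]
+    >>> orb["satellite_nominal_longitude"], orb["satellite_nominal_latitude"]
+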
- * ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit
+For *polar orbiting* satellites the readers usually provide coordinates and viewing angles of
+the swath as ancillary datasets. Additional metadata related to the satellite position includes:
-* ``raw_metadata``: Raw, unprocessed metadata from the reader.
+ * ``tle``: Two-Line Element (TLE) set used to compute the satellite's orbit
-Note that the above attributes are not necessarily available for each dataset.
+.. _data_array_coordinates:
Coordinates
===========
@@ -276,3 +359,18 @@ satpy cf nc readers
.. automodule:: satpy.readers.satpy_cf_nc
:noindex:
+
+hdf5 based readers
+------------------
+
+.. automodule:: satpy.readers.agri_l1
+ :noindex:
+
+.. automodule:: satpy.readers.ghi_l1
+ :noindex:
+
+Arctica-M N1 HDF5 format reader
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. automodule:: satpy.readers.msu_gsa_l1b
+ :noindex:
diff --git a/doc/source/remote_reading.rst b/doc/source/remote_reading.rst
new file mode 100644
index 0000000000..da22c0be12
--- /dev/null
+++ b/doc/source/remote_reading.rst
@@ -0,0 +1,150 @@
+====================
+Reading remote files
+====================
+
+Using a single reader
+=====================
+
+Some of the readers in Satpy can read data directly over various transfer protocols. This is done
+using `fsspec `_ and the various protocol-specific packages
+it builds on.
+
+As an example, reading ABI data from public AWS S3 storage can be done in the following way::
+
+ from satpy import Scene
+
+ storage_options = {'anon': True}
+ filenames = ['s3://noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*']
+ scn = Scene(reader='abi_l1b', filenames=filenames, reader_kwargs={'storage_options': storage_options})
+ scn.load(['true_color_raw'])
+
+Reading from S3 as above requires the `s3fs` library to be installed in addition to `fsspec`.
+
+As an alternative, the storage options can be given using
+`fsspec configuration `_.
+For the above example, the configuration could be saved to `s3.json` in the `fsspec` configuration directory
+(by default the `~/.config/fsspec/` directory on Linux)::
+
+ {
+ "s3": {
+ "anon": "true"
+ }
+ }
+
+.. note::
+
+ Options given in `reader_kwargs` override only the matching options given in the configuration file; everything else is left
+ as-is. In case of problems in data access, remove the configuration file to see if that solves the issue.
+
+
+For reference, reading SEVIRI HRIT data from a local S3 storage works the same way::
+
+ filenames = [
+ 's3://satellite-data-eumetcast-seviri-rss/H-000-MSG3*202204260855*',
+ ]
+ storage_options = {
+ "client_kwargs": {"endpoint_url": "https://PLACE-YOUR-SERVER-URL-HERE"},
+ "secret": "VERYBIGSECRET",
+ "key": "ACCESSKEY"
+ }
+ scn = Scene(reader='seviri_l1b_hrit', filenames=filenames, reader_kwargs={'storage_options': storage_options})
+ scn.load(['WV_073'])
+
+The corresponding `fsspec` configuration in `s3.json` would look like this::
+
+ {
+ "s3": {
+ "client_kwargs": {"endpoint_url": "https://PLACE-YOUR-SERVER-URL-HERE"},
+ "secret": "VERYBIGSECRET",
+ "key": "ACCESSKEY"
+ }
+ }
+
+
+Using multiple readers
+======================
+
+If multiple readers are used and the required credentials differ, the storage options are passed per reader like this::
+
+ reader1_filenames = [...]
+ reader2_filenames = [...]
+ filenames = {
+ 'reader1': reader1_filenames,
+ 'reader2': reader2_filenames,
+ }
+ reader1_storage_options = {...}
+ reader2_storage_options = {...}
+ reader_kwargs = {
+ 'reader1': {
+ 'option1': 'foo',
+ 'storage_options': reader1_storage_options,
+ },
+ 'reader2': {
+ 'option1': 'foo',
+ 'storage_options': reader2_storage_options,
+ }
+ }
+ scn = Scene(filenames=filenames, reader_kwargs=reader_kwargs)
+
+
+Caching the remote files
+========================
+
+Caching the remote files locally can speed up the overall processing time significantly, especially if the data are re-used,
+for example when testing. The caching can be done by taking advantage of the `fsspec caching mechanism
+`_::
+
+ reader_kwargs = {
+ 'storage_options': {
+ 's3': {'anon': True},
+ 'simple': {
+ 'cache_storage': '/tmp/s3_cache',
+ }
+ }
+ }
+
+ filenames = ['simplecache::s3://noaa-goes16/ABI-L1b-RadC/2019/001/17/*_G16_s20190011702186*']
+ scn = Scene(reader='abi_l1b', filenames=filenames, reader_kwargs=reader_kwargs)
+ scn.load(['true_color_raw'])
+ scn2 = scn.resample(scn.coarsest_area(), resampler='native')
+ scn2.save_datasets(base_dir='/tmp/', tiled=True, blockxsize=512, blockysize=512, driver='COG', overviews=[])
+
+
+The following table shows the timings for running the above code with different cache statuses:
+
+.. _cache_timing_table:
+
+.. list-table:: Processing times without and with caching
+ :header-rows: 1
+ :widths: 40 30 30
+
+ * - Caching
+ - Elapsed time
+ - Notes
+ * - No caching
+ - 650 s
+ - remove `reader_kwargs` and `simplecache::` from the code
+ * - File cache
+ - 66 s
+ - Initial run
+ * - File cache
+ - 13 s
+ - Second run
+
+.. note::
+
+ The cache is cleaned neither by Satpy nor by fsspec, so the user should handle cleaning excess files from `cache_storage`.
+
+
+.. note::
+
+ Only `simplecache` is considered thread-safe, so using the other caching mechanisms may or may not work depending
+ on the reader, the Dask scheduler, or the phase of the moon.
+
+
+Resources
+=========
+
+See :class:`~satpy.readers.FSFile` for direct usage of `fsspec` with Satpy, and the
+`fsspec documentation `_ for more details on connection options.
diff --git a/doc/source/writers.rst b/doc/source/writers.rst
index 91a14c90cf..f453f4d5a5 100644
--- a/doc/source/writers.rst
+++ b/doc/source/writers.rst
@@ -41,16 +41,20 @@ One common parameter across almost all Writers is ``filename`` and
-
* - NinJo TIFF (using ``pyninjotiff`` package)
- :class:`ninjotiff `
- - Nominal
+ - Deprecated from NinJo 7 (use ninjogeotiff)
-
* - NetCDF (Standard CF)
- :class:`cf `
- - Pre-alpha
+ - Beta
- :mod:`Usage example `
* - AWIPS II Tiled NetCDF4
- :class:`awips_tiled `
- Beta
-
+ * - GeoTIFF with NinJo tags (from NinJo 7)
+ - :class:`ninjogeotiff `
+ - Beta
+ -
Available Writers
=================
diff --git a/pyproject.toml b/pyproject.toml
index 4943fe0134..e27dcfd9e2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,14 @@
[build-system]
-requires = ["setuptools>=42", "wheel", "setuptools_scm[toml]>=3.4", 'setuptools_scm_git_archive']
+requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2", 'setuptools_scm_git_archive']
+build-backend = "setuptools.build_meta"
[tool.setuptools_scm]
-write_to = "satpy/version.py"
\ No newline at end of file
+write_to = "satpy/version.py"
+
+[tool.isort]
+sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
+profile = "black"
+skip_gitignore = true
+default_section = "THIRDPARTY"
+known_first_party = "satpy"
+line_length = 120
diff --git a/satpy/__init__.py b/satpy/__init__.py
index 2c2b663eef..476b625eeb 100644
--- a/satpy/__init__.py
+++ b/satpy/__init__.py
@@ -18,18 +18,25 @@
"""Satpy Package initializer."""
import os
-from satpy.version import version as __version__ # noqa
+
+try:
+ from satpy.version import version as __version__ # noqa
+except ModuleNotFoundError:
+ raise ModuleNotFoundError(
+ "No module named satpy.version. This could mean "
+ "you didn't install 'satpy' properly. Try reinstalling ('pip "
+ "install').")
CHUNK_SIZE = int(os.getenv('PYTROLL_CHUNK_SIZE', 4096))
-from satpy.utils import get_logger # noqa
+from satpy._config import config # noqa
from satpy.dataset import DataID, DataQuery # noqa
from satpy.dataset.data_dict import DatasetDict # noqa
-from satpy.readers import (find_files_and_readers, # noqa
- available_readers) # noqa
-from satpy.writers import available_writers # noqa
-from satpy.scene import Scene # noqa
from satpy.multiscene import MultiScene # noqa
-from satpy._config import config # noqa
+from satpy.readers import available_readers # noqa
+from satpy.readers import find_files_and_readers # noqa
+from satpy.scene import Scene # noqa
+from satpy.utils import get_logger # noqa
+from satpy.writers import available_writers # noqa
log = get_logger('satpy')
diff --git a/satpy/_compat.py b/satpy/_compat.py
index ec3b0a5ffc..6a2a4fd528 100644
--- a/satpy/_compat.py
+++ b/satpy/_compat.py
@@ -17,19 +17,74 @@
# satpy. If not, see .
"""Backports and compatibility fixes for satpy."""
+from threading import RLock
+
+_NOT_FOUND = object()
+
+
+class CachedPropertyBackport:
+ """Backport of cached_property from Python-3.8.
+
+ Source: https://github.com/python/cpython/blob/v3.8.0/Lib/functools.py#L930
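+
+    Usage matches :func:`functools.cached_property`, e.g.::
+
+        class Circle:
+            def __init__(self, radius):
+                self.radius = radius
+
+            @cached_property
+            def area(self):
+                return 3.14159 * self.radius ** 2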
+ """
+
+ def __init__(self, func): # noqa
+ self.func = func
+ self.attrname = None
+ self.__doc__ = func.__doc__
+ self.lock = RLock()
+
+ def __set_name__(self, owner, name): # noqa
+ if self.attrname is None:
+ self.attrname = name
+ elif name != self.attrname:
+ raise TypeError(
+ "Cannot assign the same cached_property to two different names "
+ f"({self.attrname!r} and {name!r})."
+ )
+
+ def __get__(self, instance, owner=None): # noqa
+ if instance is None:
+ return self
+ if self.attrname is None:
+ raise TypeError(
+ "Cannot use cached_property instance without calling __set_name__ on it.")
+ try:
+ cache = instance.__dict__
+ except AttributeError: # not all objects have __dict__ (e.g. class defines slots)
+ msg = (
+ f"No '__dict__' attribute on {type(instance).__name__!r} "
+ f"instance to cache {self.attrname!r} property."
+ )
+ raise TypeError(msg) from None
+ val = cache.get(self.attrname, _NOT_FOUND)
+ if val is _NOT_FOUND:
+ with self.lock:
+ # check if another thread filled cache while we awaited lock
+ val = cache.get(self.attrname, _NOT_FOUND)
+ if val is _NOT_FOUND:
+ val = self.func(instance)
+ try:
+ cache[self.attrname] = val
+ except TypeError:
+ msg = (
+ f"The '__dict__' attribute on {type(instance).__name__!r} instance "
+ f"does not support item assignment for caching {self.attrname!r} property."
+ )
+ raise TypeError(msg) from None
+ return val
+
+
try:
- from functools import cached_property
+ from functools import cached_property # type: ignore
except ImportError:
# for python < 3.8
- from functools import lru_cache
-
- def cached_property(func):
- """Port back functools.cached_property."""
- return property(lru_cache(maxsize=None)(func))
+ cached_property = CachedPropertyBackport # type: ignore
try:
- from numpy.typing import ArrayLike # noqa
+ from numpy.typing import ArrayLike, DTypeLike # noqa
except ImportError:
# numpy <1.20
+ from numpy import dtype as DTypeLike # noqa
from numpy import ndarray as ArrayLike # noqa
diff --git a/satpy/_config.py b/satpy/_config.py
index d23df06f0d..c64182b50c 100644
--- a/satpy/_config.py
+++ b/satpy/_config.py
@@ -16,17 +16,30 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Satpy Configuration directory and file handling."""
+from __future__ import annotations
+import ast
import glob
import logging
import os
import sys
from collections import OrderedDict
+from importlib.metadata import entry_points
+from pathlib import Path
+
+try:
+ from importlib.resources import files as impr_files # type: ignore
+except ImportError:
+ # Python 3.8
+ def impr_files(module_name: str) -> Path:
+ """Get path to module as a backport for Python 3.8."""
+ from importlib.resources import path as impr_path
+
+ with impr_path(module_name, "__init__.py") as pkg_init_path:
+ return pkg_init_path.parent
-import pkg_resources
-from donfig import Config
import appdirs
-import ast
+from donfig import Config
LOG = logging.getLogger(__name__)
@@ -37,9 +50,13 @@
_satpy_dirs = appdirs.AppDirs(appname='satpy', appauthor='pytroll')
_CONFIG_DEFAULTS = {
'cache_dir': _satpy_dirs.user_cache_dir,
- 'data_dir': _satpy_dirs.user_data_dir,
+ 'cache_lonlats': False,
+ 'cache_sensor_angles': False,
'config_path': [],
+ 'data_dir': _satpy_dirs.user_data_dir,
+ 'demo_data_dir': '.',
'download_aux': True,
+ 'sensor_angles_position_preference': 'actual',
}
# Satpy main configuration object
@@ -74,14 +91,14 @@
if _satpy_config_path.startswith("["):
# 'SATPY_CONFIG_PATH' is set by previous satpy config as a representation of a 'list'
# need to use 'ast.literal_eval' to parse the string back to a list
- _satpy_config_path = ast.literal_eval(_satpy_config_path)
+ _satpy_config_path_list = ast.literal_eval(_satpy_config_path)
else:
# colon-separated are ordered by custom -> builtins
# i.e. last-applied/highest priority to first-applied/lowest priority
- _satpy_config_path = _satpy_config_path.split(':')
+ _satpy_config_path_list = _satpy_config_path.split(os.pathsep)
- os.environ['SATPY_CONFIG_PATH'] = repr(_satpy_config_path)
- for config_dir in _satpy_config_path:
+ os.environ['SATPY_CONFIG_PATH'] = repr(_satpy_config_path_list)
+ for config_dir in _satpy_config_path_list:
_CONFIG_PATHS.append(os.path.join(config_dir, 'satpy.yaml'))
_ancpath = os.getenv('SATPY_ANCPATH', None)
@@ -105,9 +122,9 @@ def get_config_path_safe():
def get_entry_points_config_dirs(name, include_config_path=True):
"""Get the config directories for all entry points of given name."""
dirs = []
- for entry_point in pkg_resources.iter_entry_points(name):
- package_name = entry_point.module_name.split('.', 1)[0]
- new_dir = os.path.join(entry_point.dist.module_path, package_name, 'etc')
+ for entry_point in entry_points().get(name, []):
+ module = _entry_point_module(entry_point)
+ new_dir = str(impr_files(module) / "etc")
if not dirs or dirs[-1] != new_dir:
dirs.append(new_dir)
if include_config_path:
@@ -115,6 +132,14 @@ def get_entry_points_config_dirs(name, include_config_path=True):
return dirs
+def _entry_point_module(entry_point):
+ try:
+ return entry_point.module
+ except AttributeError:
+ # Python 3.8
+ return entry_point.value.split(":")[0].strip()
+
+
def config_search_paths(filename, search_dirs=None, **kwargs):
"""Get series of configuration base paths where Satpy configs are located."""
if search_dirs is None:
diff --git a/satpy/aux_download.py b/satpy/aux_download.py
index 159bcb779b..82095737f0 100644
--- a/satpy/aux_download.py
+++ b/satpy/aux_download.py
@@ -17,12 +17,13 @@
# satpy. If not, see .
"""Functions and utilities for downloading ancillary data."""
-import os
import logging
-import satpy
+import os
import pooch
+import satpy
+
logger = logging.getLogger(__name__)
_FILE_REGISTRY = {}
@@ -192,17 +193,16 @@ def _find_registerable_files_compositors(sensors=None):
Compositor objects should register files when they are initialized.
"""
- from satpy.composites.config_loader import CompositorLoader
- composite_loader = CompositorLoader()
+ from satpy.composites.config_loader import all_composite_sensors, load_compositor_configs_for_sensors
if sensors is None:
- sensors = composite_loader.all_composite_sensors()
+ sensors = all_composite_sensors()
if sensors:
- composite_loader.load_compositors(sensors)
- _register_modifier_files(composite_loader)
+ mods = load_compositor_configs_for_sensors(sensors)[1]
+ _register_modifier_files(mods)
-def _register_modifier_files(composite_loader):
- for mod_sensor_dict in composite_loader.modifiers.values():
+def _register_modifier_files(modifiers):
+ for mod_sensor_dict in modifiers.values():
for mod_name, (mod_cls, mod_props) in mod_sensor_dict.items():
try:
mod_cls(**mod_props)
@@ -214,6 +214,7 @@ def _register_modifier_files(composite_loader):
def _find_registerable_files_readers(readers=None):
"""Load all readers so that files are registered."""
import yaml
+
from satpy.readers import configs_for_reader, load_reader
for reader_configs in configs_for_reader(reader=readers):
try:
diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py
index 2b7d3374b2..93f595735a 100644
--- a/satpy/composites/__init__.py
+++ b/satpy/composites/__init__.py
@@ -1,6 +1,4 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2015-2020 Satpy developers
+# Copyright (c) 2015-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -16,6 +14,7 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Base classes for composite objects."""
+from __future__ import annotations
import logging
import os
@@ -26,15 +25,15 @@
import xarray as xr
import satpy
+from satpy.aux_download import DataDownloadMixin
from satpy.dataset import DataID, combine_metadata
from satpy.dataset.dataid import minimal_default_keys_config
-from satpy.aux_download import DataDownloadMixin
+from satpy.utils import unify_chunks
from satpy.writers import get_enhanced_image
-
LOG = logging.getLogger(__name__)
-NEGLIBLE_COORDS = ['time']
+NEGLIGIBLE_COORDS = ['time']
"""Keywords identifying non-dimensional coordinates to be ignored during composite generation."""
MASKING_COMPOSITOR_METHODS = ['less', 'less_equal', 'equal', 'greater_equal',
@@ -45,14 +44,10 @@
class IncompatibleAreas(Exception):
"""Error raised upon compositing things of different shapes."""
- pass
-
class IncompatibleTimes(Exception):
"""Error raised upon compositing things from different times."""
- pass
-
def check_times(projectables):
"""Check that *projectables* have compatible times."""
@@ -76,8 +71,7 @@ def check_times(projectables):
# Is there a more gracious way to handle this ?
if np.max(times) - np.min(times) > np.timedelta64(1, 's'):
raise IncompatibleTimes
- else:
- mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times)
+ mid_time = (np.max(times) - np.min(times)) / 2 + np.min(times)
return mid_time
@@ -94,7 +88,7 @@ def sub_arrays(proj1, proj2):
class CompositeBase:
- """Base class for all compositors.
+ """Base class for all compositors and modifiers.
A compositor in Satpy is a class that takes in zero or more input
DataArrays and produces a new DataArray with its own identifier (name).
@@ -157,16 +151,51 @@ def apply_modifier_info(self, origin, destination):
d[k] = o[k]
def match_data_arrays(self, data_arrays):
- """Match data arrays so that they can be used together in a composite."""
+ """Match data arrays so that they can be used together in a composite.
+
+ For the purpose of this method, "can be used together" means:
+
+ - All arrays should have the same dimensions.
+ - Either all arrays should have an area, or none should.
+ - If all have an area, the areas should be all the same.
+
+ In addition, negligible non-dimensional coordinates are dropped (see
+ :meth:`drop_coordinates`) and dask chunks are unified (see
+ :func:`satpy.utils.unify_chunks`).
+
+ Args:
+ data_arrays (List[arrays]): Arrays to be checked
+
+ Returns:
+ data_arrays (List[arrays]):
+ Arrays with negligible non-dimensional coordinates removed.
+
+ Raises:
+ :class:`IncompatibleAreas`:
+ If dimension or areas do not match.
+ :class:`ValueError`:
+ If some, but not all data arrays lack an area attribute.
+ """
self.check_geolocation(data_arrays)
- return self.drop_coordinates(data_arrays)
+ new_arrays = self.drop_coordinates(data_arrays)
+ new_arrays = list(unify_chunks(*new_arrays))
+ return new_arrays
def drop_coordinates(self, data_arrays):
- """Drop neglible non-dimensional coordinates."""
+ """Drop negligible non-dimensional coordinates.
+
+ Drops negligible coordinates if they do not correspond to any
+ dimension. Negligible coordinates are defined in the
+ :attr:`NEGLIGIBLE_COORDS` module attribute.
+
+ Args:
+ data_arrays (List[arrays]): Arrays to be checked
+ """
new_arrays = []
for ds in data_arrays:
drop = [coord for coord in ds.coords
- if coord not in ds.dims and any([neglible in coord for neglible in NEGLIBLE_COORDS])]
+ if coord not in ds.dims and
+ any([negligible in coord for negligible in NEGLIGIBLE_COORDS])]
if drop:
new_arrays.append(ds.drop(drop))
else:
@@ -175,7 +204,23 @@ def drop_coordinates(self, data_arrays):
return new_arrays
def check_geolocation(self, data_arrays):
- """Check that the geolocations of the *data_arrays* are compatible."""
+ """Check that the geolocations of the *data_arrays* are compatible.
+
+ For the purpose of this method, "compatible" means:
+
+ - All arrays should have the same dimensions.
+ - Either all arrays should have an area, or none should.
+ - If all have an area, the areas should be all the same.
+
+ Args:
+ data_arrays (List[arrays]): Arrays to be checked
+
+ Raises:
+ :class:`IncompatibleAreas`:
+ If dimension or areas do not match.
+ :class:`ValueError`:
+ If some, but not all data arrays lack an area attribute.
+ """
if len(data_arrays) == 1:
return
@@ -191,7 +236,7 @@ def check_geolocation(self, data_arrays):
areas = [ds.attrs.get('area') for ds in data_arrays]
if all(a is None for a in areas):
return
- elif any(a is None for a in areas):
+ if any(a is None for a in areas):
raise ValueError("Missing 'area' attribute")
if not all(areas[0] == x for x in areas[1:]):
@@ -199,12 +244,6 @@ def check_geolocation(self, data_arrays):
"'{}'".format(self.attrs['name']))
raise IncompatibleAreas("Areas are different")
- def check_areas(self, data_arrays):
- """Check that the areas of the *data_arrays* are compatible."""
- warnings.warn('satpy.composites.CompositeBase.check_areas is deprecated, use '
- 'satpy.composites.CompositeBase.match_data_arrays instead')
- return self.match_data_arrays(data_arrays)
-
class DifferenceCompositor(CompositeBase):
"""Make the difference of two data arrays."""
@@ -216,13 +255,46 @@ def __call__(self, projectables, nonprojectables=None, **attrs):
projectables = self.match_data_arrays(projectables)
info = combine_metadata(*projectables)
info['name'] = self.attrs['name']
- info.update(attrs)
+ info.update(self.attrs) # attrs from YAML/__init__
+ info.update(attrs) # overwriting of DataID properties
proj = projectables[0] - projectables[1]
proj.attrs = info
return proj
+class RatioCompositor(CompositeBase):
+ """Make the ratio of two data arrays."""
+
+ def __call__(self, projectables, nonprojectables=None, **info):
+ """Generate the composite."""
+ if len(projectables) != 2:
+ raise ValueError("Expected 2 datasets, got %d" % (len(projectables),))
+ projectables = self.match_data_arrays(projectables)
+ info = combine_metadata(*projectables)
+ info['name'] = self.attrs['name']
+
+ proj = projectables[0] / projectables[1]
+ proj.attrs = info
+ return proj
+
+
+class SumCompositor(CompositeBase):
+ """Make the sum of two data arrays."""
+
+ def __call__(self, projectables, nonprojectables=None, **info):
+ """Generate the composite."""
+ if len(projectables) != 2:
+ raise ValueError("Expected 2 datasets, got %d" % (len(projectables),))
+ projectables = self.match_data_arrays(projectables)
+ info = combine_metadata(*projectables)
+ info['name'] = self.attrs['name']
+
+ proj = projectables[0] + projectables[1]
+ proj.attrs = info
+ return proj
+
+
class SingleBandCompositor(CompositeBase):
"""Basic single-band composite builder.
@@ -252,6 +324,51 @@ def __call__(self, projectables, nonprojectables=None, **attrs):
dims=data.dims, coords=data.coords)
+class CategoricalDataCompositor(CompositeBase):
+ """Compositor used to recategorize categorical data using a look-up-table.
+
+ Each value in the data array will be recategorized to a new category defined in
+ the look-up-table using the original value as an index for that look-up-table.
+
+ Example:
+ data = [[1, 3, 2], [4, 2, 0]]
+ lut = [10, 20, 30, 40, 50]
+ res = [[20, 40, 30], [50, 30, 10]]
+ """
+
+ def __init__(self, name, lut=None, **kwargs):
+ """Get look-up-table used to recategorize data.
+
+ Args:
+ lut (list): a list of new categories. The length must be greater than the
+ maximum value in the data array that should be recategorized.
+ """
+ self.lut = np.array(lut)
+ super(CategoricalDataCompositor, self).__init__(name, **kwargs)
+
+ def _update_attrs(self, new_attrs):
+ """Modify name and add LUT."""
+ new_attrs['name'] = self.attrs['name']
+ new_attrs['composite_lut'] = list(self.lut)
+
+ @staticmethod
+ def _getitem(block, lut):
+ return lut[block]
+
+ def __call__(self, projectables, **kwargs):
+ """Recategorize the data."""
+ if len(projectables) != 1:
+ raise ValueError("Can't have more than one dataset for a categorical data composite")
+
+ data = projectables[0].astype(int)
+ res = data.data.map_blocks(self._getitem, self.lut, dtype=self.lut.dtype)
+
+ new_attrs = data.attrs.copy()
+ self._update_attrs(new_attrs)
+
+ return xr.DataArray(res, dims=data.dims, attrs=new_attrs, coords=data.coords)
+
+
class GenericCompositor(CompositeBase):
"""Basic colored composite builder."""
@@ -371,7 +488,7 @@ def __call__(self, projectables, nonprojectables=None, **info):
return super(Filler, self).__call__([filled_projectable], **info)
-class MultiFiller(GenericCompositor):
+class MultiFiller(SingleBandCompositor):
"""Fix holes in projectable 1 with data from the next projectables."""
def __call__(self, projectables, nonprojectables=None, **info):
@@ -384,7 +501,7 @@ def __call__(self, projectables, nonprojectables=None, **info):
for next_projectable in info['optional_datasets']:
filled_projectable = filled_projectable.fillna(next_projectable)
- return super(MultiFiller, self).__call__([filled_projectable], **info)
+ return super().__call__([filled_projectable], **info)
class RGBCompositor(GenericCompositor):
@@ -408,7 +525,7 @@ def build_colormap(palette, dtype, info):
Colormaps come in different forms, but they are all supposed to have
color values between 0 and 255. The following cases are considered:
- - Palettes comprised of only a list on colors. If *dtype* is uint8,
+ - Palettes comprised of only a list of colors. If *dtype* is uint8,
the values of the colormap are the enumeration of the colors.
Otherwise, the colormap values will be spread evenly from the min
to the max of the valid_range provided in `info`.
@@ -508,9 +625,14 @@ def _insert_palette_colors(channels, palette):
class DayNightCompositor(GenericCompositor):
- """A compositor that blends a day data with night data."""
+ """A compositor that blends day data with night data.
- def __init__(self, name, lim_low=85., lim_high=88., **kwargs):
+ Using the `day_night` flag it is also possible to provide only a day product
+ or only a night product and mask out (make transparent) the opposite portion
+ of the image (night or day). See the documentation below for more details.
+ """
+
+ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", **kwargs):
"""Collect custom configuration values.
Args:
@@ -518,61 +640,83 @@ def __init__(self, name, lim_low=85., lim_high=88., **kwargs):
blending of the given channels
lim_high (float): upper limit of Sun zenith angle for the
blending of the given channels
+ day_night (string): "day_night" means both day and night portions will be kept
+ "day_only" means only day portion will be kept
+ "night_only" means only night portion will be kept
"""
self.lim_low = lim_low
self.lim_high = lim_high
+ self.day_night = day_night
super(DayNightCompositor, self).__init__(name, **kwargs)
def __call__(self, projectables, **kwargs):
"""Generate the composite."""
projectables = self.match_data_arrays(projectables)
-
- day_data = projectables[0]
- night_data = projectables[1]
+ # At least one composite is requested.
+ foreground_data = projectables[0]
lim_low = np.cos(np.deg2rad(self.lim_low))
lim_high = np.cos(np.deg2rad(self.lim_high))
try:
- coszen = np.cos(np.deg2rad(projectables[2]))
+ coszen = np.cos(np.deg2rad(projectables[2 if self.day_night == "day_night" else 1]))
except IndexError:
- from pyorbital.astronomy import cos_zen
+ from satpy.modifiers.angles import get_cos_sza
LOG.debug("Computing sun zenith angles.")
# Get chunking that matches the data
- try:
- chunks = day_data.sel(bands=day_data['bands'][0]).chunks
- except KeyError:
- chunks = day_data.chunks
- lons, lats = day_data.attrs["area"].get_lonlats(chunks=chunks)
- coszen = xr.DataArray(cos_zen(day_data.attrs["start_time"],
- lons, lats),
- dims=['y', 'x'],
- coords=[day_data['y'], day_data['x']])
+ coszen = get_cos_sza(foreground_data)
# Calculate blending weights
coszen -= np.min((lim_high, lim_low))
coszen /= np.abs(lim_low - lim_high)
coszen = coszen.clip(0, 1)
- # Apply enhancements to get images
- day_data = enhance2dataset(day_data)
- night_data = enhance2dataset(night_data)
+ # Apply enhancements
+ foreground_data = enhance2dataset(foreground_data)
- # Adjust bands so that they match
- # L/RGB -> RGB/RGB
- # LA/RGB -> RGBA/RGBA
- # RGB/RGBA -> RGBA/RGBA
- day_data = add_bands(day_data, night_data['bands'])
- night_data = add_bands(night_data, day_data['bands'])
+ if "only" in self.day_night:
+ # Only one portion (day or night) is selected. One composite is requested.
+ # Add alpha band to single L/RGB composite to make the masked-out portion transparent
+ # L -> LA
+ # RGB -> RGBA
+ foreground_data = add_alpha_bands(foreground_data)
- # Replace missing channel data with zeros
- day_data = zero_missing_data(day_data, night_data)
- night_data = zero_missing_data(night_data, day_data)
+ # No need to replace missing channel data with zeros
+ # Get metadata
+ attrs = foreground_data.attrs.copy()
- # Get merged metadata
- attrs = combine_metadata(day_data, night_data)
+ # Determine the composite position
+ day_data = foreground_data if "day" in self.day_night else 0
+ night_data = foreground_data if "night" in self.day_night else 0
+
+ else:
+ # Both day and night portions are selected. Two composites are requested. Get the second one merged.
+ background_data = projectables[1]
+
+ # Apply enhancements
+ background_data = enhance2dataset(background_data)
+
+ # Adjust bands so that they match
+ # L/RGB -> RGB/RGB
+ # LA/RGB -> RGBA/RGBA
+ # RGB/RGBA -> RGBA/RGBA
+ foreground_data = add_bands(foreground_data, background_data['bands'])
+ background_data = add_bands(background_data, foreground_data['bands'])
+
+ # Replace missing channel data with zeros
+ foreground_data = zero_missing_data(foreground_data, background_data)
+ background_data = zero_missing_data(background_data, foreground_data)
+
+ # Get merged metadata
+ attrs = combine_metadata(foreground_data, background_data)
+
+ # Determine the composite position
+ day_data = foreground_data
+ night_data = background_data
# Blend the two images together
- data = (1 - coszen) * night_data + coszen * day_data
+ day_portion = coszen * day_data
+ night_portion = (1 - coszen) * night_data
+ data = night_portion + day_portion
data.attrs = attrs
# Split to separate bands so the mode is correct
@@ -581,6 +725,28 @@ def __call__(self, projectables, **kwargs):
return super(DayNightCompositor, self).__call__(data, **kwargs)
+def add_alpha_bands(data):
+ """Only used for DayNightCompositor.
+
+ Add an alpha band to an L or RGB composite as a prerequisite for the following band matching
+ to make the masked-out area transparent.
+ """
+ if 'A' not in data['bands'].data:
+ new_data = [data.sel(bands=band) for band in data['bands'].data]
+ # Create alpha band based on a copy of the first "real" band
+ alpha = new_data[0].copy()
+ alpha.data = da.ones((data.sizes['y'],
+ data.sizes['x']),
+ chunks=new_data[0].chunks)
+ # Rename band to indicate it's alpha
+ alpha['bands'] = 'A'
+ new_data.append(alpha)
+ new_data = xr.concat(new_data, dim='bands')
+ new_data.attrs['mode'] = data.attrs['mode'] + 'A'
+ data = new_data
+ return data
+
+
def enhance2dataset(dset, convert_p=False):
"""Return the enhancement dataset *dset* as an array.
@@ -650,7 +816,6 @@ def add_bands(data, bands):
new_data = xr.concat(new_data, dim='bands')
new_data.attrs['mode'] = data.attrs['mode'] + 'A'
data = new_data
-
return data
@@ -759,25 +924,18 @@ class RatioSharpenedRGB(GenericCompositor):
def __init__(self, *args, **kwargs):
"""Instanciate the ration sharpener."""
- self.high_resolution_band = kwargs.pop("high_resolution_band", "red")
- if self.high_resolution_band not in ['red', 'green', 'blue', None]:
+ self.high_resolution_color = kwargs.pop("high_resolution_band", "red")
+ if self.high_resolution_color not in ['red', 'green', 'blue', None]:
raise ValueError("RatioSharpenedRGB.high_resolution_band must "
"be one of ['red', 'green', 'blue', None]. Not "
- "'{}'".format(self.high_resolution_band))
- kwargs.setdefault('common_channel_mask', False)
+ "'{}'".format(self.high_resolution_color))
super(RatioSharpenedRGB, self).__init__(*args, **kwargs)
- def _get_band(self, high_res, low_res, color, ratio):
- """Figure out what data should represent this color."""
- if self.high_resolution_band == color:
- ret = high_res
- else:
- ret = low_res * ratio
- ret.attrs = low_res.attrs.copy()
- return ret
-
def __call__(self, datasets, optional_datasets=None, **info):
- """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``."""
+ """Sharpen low resolution datasets by multiplying by the ratio of ``high_res / low_res``.
+
+ The resulting RGB has the units attribute removed.
+ """
if len(datasets) != 3:
raise ValueError("Expected 3 datasets, got %d" % (len(datasets), ))
if not all(x.shape == datasets[0].shape for x in datasets[1:]) or \
@@ -786,53 +944,67 @@ def __call__(self, datasets, optional_datasets=None, **info):
raise IncompatibleAreas('RatioSharpening requires datasets of '
'the same size. Must resample first.')
+ optional_datasets = tuple() if optional_datasets is None else optional_datasets
+ datasets = self.match_data_arrays(datasets + optional_datasets)
+ red, green, blue, new_attrs = self._get_and_sharpen_rgb_data_arrays_and_meta(datasets, optional_datasets)
+ combined_info = self._combined_sharpened_info(info, new_attrs)
+ res = super(RatioSharpenedRGB, self).__call__((red, green, blue,), **combined_info)
+ res.attrs.pop("units", None)
+ return res
+
+ def _get_and_sharpen_rgb_data_arrays_and_meta(self, datasets, optional_datasets):
new_attrs = {}
- if optional_datasets:
- datasets = self.match_data_arrays(datasets + optional_datasets)
- high_res = datasets[-1]
- p1, p2, p3 = datasets[:3]
+ low_res_red = datasets[0]
+ low_res_green = datasets[1]
+ low_res_blue = datasets[2]
+ if optional_datasets and self.high_resolution_color is not None:
+ LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_color))
+ high_res = datasets[3]
if 'rows_per_scan' in high_res.attrs:
new_attrs.setdefault('rows_per_scan', high_res.attrs['rows_per_scan'])
new_attrs.setdefault('resolution', high_res.attrs['resolution'])
- colors = ['red', 'green', 'blue']
-
- if self.high_resolution_band in colors:
- LOG.debug("Sharpening image with high resolution {} band".format(self.high_resolution_band))
- low_res = datasets[:3][colors.index(self.high_resolution_band)]
- ratio = high_res / low_res
- # make ratio a no-op (multiply by 1) where the ratio is NaN or
- # infinity or it is negative.
- ratio = ratio.where(np.isfinite(ratio) & (ratio >= 0), 1.)
- # we don't need ridiculously high ratios, they just make bright pixels
- ratio = ratio.clip(0, 1.5)
- else:
- LOG.debug("No sharpening band specified for ratio sharpening")
- high_res = None
- ratio = 1.
-
- r = self._get_band(high_res, p1, 'red', ratio)
- g = self._get_band(high_res, p2, 'green', ratio)
- b = self._get_band(high_res, p3, 'blue', ratio)
+ low_res_colors = ['red', 'green', 'blue']
+ low_resolution_index = low_res_colors.index(self.high_resolution_color)
else:
- datasets = self.match_data_arrays(datasets)
- r, g, b = datasets[:3]
-
- # combine the masks
- mask = ~(r.isnull() | g.isnull() | b.isnull())
- r = r.where(mask)
- g = g.where(mask)
- b = b.where(mask)
-
- # Collect information that is the same between the projectables
- # we want to use the metadata from the original datasets since the
- # new r, g, b arrays may have lost their metadata during calculations
- info = combine_metadata(*datasets)
- info.update(new_attrs)
+ LOG.debug("No sharpening band specified for ratio sharpening")
+ high_res = None
+ low_resolution_index = 0
+
+ if high_res is not None:
+ low_res = (low_res_red, low_res_green, low_res_blue)[low_resolution_index]
+ ratio = da.map_blocks(
+ _get_sharpening_ratio,
+ high_res.data,
+ low_res.data,
+ meta=np.array((), dtype=high_res.dtype),
+ dtype=high_res.dtype,
+ chunks=high_res.chunks,
+ )
+ with xr.set_options(keep_attrs=True):
+ low_res_red = high_res if low_resolution_index == 0 else low_res_red * ratio
+ low_res_green = high_res if low_resolution_index == 1 else low_res_green * ratio
+ low_res_blue = high_res if low_resolution_index == 2 else low_res_blue * ratio
+ return low_res_red, low_res_green, low_res_blue, new_attrs
+
+ def _combined_sharpened_info(self, info, new_attrs):
+ combined_info = {}
+ combined_info.update(info)
+ combined_info.update(new_attrs)
# Update that information with configured information (including name)
- info.update(self.attrs)
+ combined_info.update(self.attrs)
# Force certain pieces of metadata that we *know* to be true
- info.setdefault("standard_name", "true_color")
- return super(RatioSharpenedRGB, self).__call__((r, g, b), **info)
+ combined_info.setdefault("standard_name", "true_color")
+ return combined_info
+
+
+def _get_sharpening_ratio(high_res, low_res):
+ ratio = high_res / low_res
+ # make ratio a no-op (multiply by 1) where the ratio is NaN, infinity,
+ # or it is negative.
+ ratio[~np.isfinite(ratio) | (ratio < 0)] = 1.0
+ # we don't need ridiculously high ratios, they just make bright pixels
+ np.clip(ratio, 0, 1.5, out=ratio)
+ return ratio
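
A quick numeric check of the clamping logic above, replicated with plain NumPy (the function itself is meant to run per-chunk inside ``da.map_blocks``); values are illustrative::

    import numpy as np

    high = np.array([3.0, 1.0, np.nan, -1.0])
    low = np.array([1.0, 2.0, 1.0, 1.0])
    ratio = high / low                               # [3.0, 0.5, nan, -1.0]
    ratio[~np.isfinite(ratio) | (ratio < 0)] = 1.0   # NaN/negative -> no-op
    np.clip(ratio, 0, 1.5, out=ratio)                # [1.5, 0.5, 1.0, 1.0]
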
def _mean4(data, offset=(0, 0), block_id=None):
@@ -855,7 +1027,8 @@ def _mean4(data, offset=(0, 0), block_id=None):
av_data = np.pad(data, pad, 'edge')
new_shape = (int(rows2 / 2.), 2, int(cols2 / 2.), 2)
- data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3))
+ with np.errstate(invalid='ignore'):
+ data_mean = np.nanmean(av_data.reshape(new_shape), axis=(1, 3))
data_mean = np.repeat(np.repeat(data_mean, 2, axis=0), 2, axis=1)
data_mean = data_mean[row_offset:row_offset + rows, col_offset:col_offset + cols]
return data_mean
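
The reshape trick used by ``_mean4`` averages each 2x2 block and then repeats the means back to the original shape; a stand-alone sketch with a small array::

    import numpy as np

    d = np.arange(16.0).reshape(4, 4)
    # group rows and columns in pairs, then average each 2x2 block
    means = np.nanmean(d.reshape(2, 2, 2, 2), axis=(1, 3))        # shape (2, 2)
    upsampled = np.repeat(np.repeat(means, 2, axis=0), 2, axis=1)  # back to (4, 4)
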
@@ -891,15 +1064,15 @@ def four_element_average_dask(d):
def __call__(self, datasets, optional_datasets=None, **attrs):
"""Generate the composite."""
colors = ['red', 'green', 'blue']
- if self.high_resolution_band not in colors:
+ if self.high_resolution_color not in colors:
raise ValueError("SelfSharpenedRGB requires at least one high resolution band, not "
- "'{}'".format(self.high_resolution_band))
+ "'{}'".format(self.high_resolution_color))
- high_res = datasets[colors.index(self.high_resolution_band)]
+ high_res = datasets[colors.index(self.high_resolution_color)]
high_mean = self.four_element_average_dask(high_res)
- red = high_mean if self.high_resolution_band == 'red' else datasets[0]
- green = high_mean if self.high_resolution_band == 'green' else datasets[1]
- blue = high_mean if self.high_resolution_band == 'blue' else datasets[2]
+ red = high_mean if self.high_resolution_color == 'red' else datasets[0]
+ green = high_mean if self.high_resolution_color == 'green' else datasets[1]
+ blue = high_mean if self.high_resolution_color == 'blue' else datasets[2]
return super(SelfSharpenedRGB, self).__call__((red, green, blue), optional_datasets=(high_res,), **attrs)
@@ -948,13 +1121,14 @@ def __call__(self, projectables, *args, **kwargs):
"""Generate the composite."""
projectables = self.match_data_arrays(projectables)
luminance = projectables[0]
- luminance /= 100.
+ luminance = luminance / 100.
# Limit between min(luminance) ... 1.0
luminance = luminance.clip(max=1.)
# Get the enhanced version of the RGB composite to be sharpened
rgb_img = enhance2dataset(projectables[1])
- rgb_img *= luminance
+ # Ignore alpha band when applying luminance
+ rgb_img = rgb_img.where(rgb_img.bands == 'A', rgb_img * luminance)
return super(SandwichCompositor, self).__call__(rgb_img, *args, **kwargs)
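
The alpha-skipping multiply above can be checked in isolation: ``where`` keeps the band where the condition is true and takes the scaled values elsewhere. A minimal sketch::

    import numpy as np
    import xarray as xr

    rgba = xr.DataArray(np.ones((4, 2, 2)), dims=("bands", "y", "x"),
                        coords={"bands": ["R", "G", "B", "A"]})
    lum = xr.DataArray(np.full((2, 2), 0.5), dims=("y", "x"))
    out = rgba.where(rgba.bands == "A", rgba * lum)
    # R, G and B are scaled by the luminance; the alpha band stays 1.0
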
@@ -1145,13 +1319,30 @@ def __call__(self, projectables, *args, **kwargs):
foreground = add_bands(foreground, background['bands'])
background = add_bands(background, foreground['bands'])
+ attrs = self._combine_metadata_with_mode_and_sensor(foreground, background)
+ data = self._get_merged_image_data(foreground, background)
+ res = super(BackgroundCompositor, self).__call__(data, **kwargs)
+ res.attrs.update(attrs)
+ return res
+
+ def _combine_metadata_with_mode_and_sensor(self,
+ foreground: xr.DataArray,
+ background: xr.DataArray
+ ) -> dict:
# Get merged metadata
attrs = combine_metadata(foreground, background)
+ # 'mode' is no longer valid after we've removed the 'A'
+ # let the base class __call__ determine mode
+ attrs.pop("mode", None)
if attrs.get('sensor') is None:
# sensor can be a set
- attrs['sensor'] = self._get_sensors(projectables)
+ attrs['sensor'] = self._get_sensors([foreground, background])
+ return attrs
- # Stack the images
+ @staticmethod
+ def _get_merged_image_data(foreground: xr.DataArray,
+ background: xr.DataArray
+ ) -> list[xr.DataArray]:
if 'A' in foreground.attrs['mode']:
# Use alpha channel as weight and blend the two composites
alpha = foreground.sel(bands='A')
@@ -1165,19 +1356,20 @@ def __call__(self, projectables, *args, **kwargs):
chan = xr.where(chan.isnull(), bg_band, chan)
data.append(chan)
else:
- data = xr.where(foreground.isnull(), background, foreground)
+ data_arr = xr.where(foreground.isnull(), background, foreground)
# Split to separate bands so the mode is correct
- data = [data.sel(bands=b) for b in data['bands']]
+ data = [data_arr.sel(bands=b) for b in data_arr['bands']]
- res = super(BackgroundCompositor, self).__call__(data, **kwargs)
- res.attrs.update(attrs)
- return res
+ return data
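
The alpha branch (partially elided in the hunk above) follows the usual compositing formula, weighting the foreground by its alpha band and filling the remainder from the background. A hedged numeric check::

    fg, bg, alpha = 0.8, 0.2, 0.25
    blended = fg * alpha + bg * (1 - alpha)   # 0.35
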
class MaskingCompositor(GenericCompositor):
"""A compositor that masks e.g. IR 10.8 channel data using cloud products from NWC SAF."""
- def __init__(self, name, transparency=None, conditions=None, **kwargs):
+ _supported_modes = {"LA", "RGBA"}
+
+ def __init__(self, name, transparency=None, conditions=None, mode="LA",
+ **kwargs):
"""Collect custom configuration values.
Kwargs:
@@ -1187,6 +1379,10 @@ def __init__(self, name, transparency=None, conditions=None, **kwargs):
DEPRECATED.
conditions (list): list of three items determining the masking
settings.
+ mode (str, optional): Image mode to return. For single-band input,
+ this shall be "LA" (default) or "RGBA". For
+ multi-band input, this argument is ignored
+ as the result is always RGBA.
Each condition in *conditions* consists of three items:
@@ -1245,6 +1441,10 @@ def __init__(self, name, transparency=None, conditions=None, **kwargs):
self.conditions = conditions
if self.conditions is None:
raise ValueError("Masking conditions not defined.")
+ if mode not in self._supported_modes:
+ raise ValueError(f"Invalid mode {mode!s}. Supported modes: " +
+ ", ".join(self._supported_modes))
+ self.mode = mode
super(MaskingCompositor, self).__init__(name, **kwargs)
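
A hedged sketch of constructing the compositor with the new ``mode`` keyword; the condition values and method names here are illustrative, not taken from a real product configuration::

    from satpy.composites import MaskingCompositor

    conditions = [{"method": "equal", "value": 2, "transparency": 100},
                  {"method": "equal", "value": 4, "transparency": 45}]
    comp = MaskingCompositor("masked_ir", conditions=conditions, mode="RGBA")
    # mode="RGBA" replicates single-band input to R, G and B before the
    # alpha band is appended; the default "LA" keeps a single L band.
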
@@ -1255,34 +1455,11 @@ def __call__(self, projectables, *args, **kwargs):
projectables = self.match_data_arrays(projectables)
data_in = projectables[0]
mask_in = projectables[1]
- mask_data = mask_in.data
alpha_attrs = data_in.attrs.copy()
- if 'bands' in data_in.dims:
- data = [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A']
- else:
- data = [data_in]
-
- # Create alpha band
- alpha = da.ones((data[0].sizes['y'],
- data[0].sizes['x']),
- chunks=data[0].chunks)
-
- for condition in self.conditions:
- method = condition['method']
- value = condition.get('value', None)
- if isinstance(value, str):
- value = _get_flag_value(mask_in, value)
- transparency = condition['transparency']
- mask = self._get_mask(method, value, mask_data)
-
- if transparency == 100.0:
- data = self._set_data_nans(data, mask, alpha_attrs)
- alpha_val = 1. - transparency / 100.
- alpha = da.where(mask, alpha_val, alpha)
+ data = self._select_data_bands(data_in)
- alpha = xr.DataArray(data=alpha, attrs=alpha_attrs,
- dims=data[0].dims, coords=data[0].coords)
+ alpha = self._get_alpha_bands(data, mask_in, alpha_attrs)
data.append(alpha)
res = super(MaskingCompositor, self).__call__(data, **kwargs)
@@ -1316,6 +1493,44 @@ def _set_data_nans(self, data, mask, attrs):
return data
+ def _select_data_bands(self, data_in):
+ """Select data to be composited from input data.
+
+ From input data, select the bands that need to have masking applied.
+ """
+ if 'bands' in data_in.dims:
+ return [data_in.sel(bands=b) for b in data_in['bands'] if b != 'A']
+ if self.mode == "RGBA":
+ return [data_in, data_in, data_in]
+ return [data_in]
+
+ def _get_alpha_bands(self, data, mask_in, alpha_attrs):
+ """Get alpha bands.
+
+ From input data, masks, and attributes, get alpha band.
+ """
+ # Create alpha band
+ mask_data = mask_in.data
+ alpha = da.ones((data[0].sizes['y'],
+ data[0].sizes['x']),
+ chunks=data[0].chunks)
+
+ for condition in self.conditions:
+ method = condition['method']
+ value = condition.get('value', None)
+ if isinstance(value, str):
+ value = _get_flag_value(mask_in, value)
+ transparency = condition['transparency']
+ mask = self._get_mask(method, value, mask_data)
+
+ if transparency == 100.0:
+ data = self._set_data_nans(data, mask, alpha_attrs)
+ alpha_val = 1. - transparency / 100.
+ alpha = da.where(mask, alpha_val, alpha)
+
+ return xr.DataArray(data=alpha, attrs=alpha_attrs,
+ dims=data[0].dims, coords=data[0].coords)
+
def _get_flag_value(mask, val):
"""Get a numerical value of the named flag.
@@ -1334,7 +1549,7 @@ def _get_flag_value(mask, val):
return flag_values[index]
-class LongitudeMaskingCompositor(GenericCompositor):
+class LongitudeMaskingCompositor(SingleBandCompositor):
"""Masks areas outside defined longitudes."""
def __init__(self, name, lon_min=None, lon_max=None, **kwargs):
@@ -1353,7 +1568,7 @@ def __init__(self, name, lon_min=None, lon_max=None, **kwargs):
self.lon_min = -180.
if not self.lon_max:
self.lon_max = 180.
- super(LongitudeMaskingCompositor, self).__init__(name, **kwargs)
+ super().__init__(name, **kwargs)
def __call__(self, projectables, nonprojectables=None, **info):
"""Generate the composite."""
@@ -1366,4 +1581,4 @@ def __call__(self, projectables, nonprojectables=None, **info):
lon_min_max = np.logical_or(lons >= self.lon_min, lons <= self.lon_max)
masked_projectable = projectable.where(lon_min_max)
- return super(LongitudeMaskingCompositor, self).__call__([masked_projectable], **info)
+ return super().__call__([masked_projectable], **info)
diff --git a/satpy/composites/abi.py b/satpy/composites/abi.py
index 762c08567e..3ae5237906 100644
--- a/satpy/composites/abi.py
+++ b/satpy/composites/abi.py
@@ -18,6 +18,7 @@
"""Composite classes for the ABI instrument."""
import logging
+
from satpy.composites import GenericCompositor
LOG = logging.getLogger(__name__)
diff --git a/satpy/composites/agri.py b/satpy/composites/agri.py
new file mode 100644
index 0000000000..839706457e
--- /dev/null
+++ b/satpy/composites/agri.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2015-2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Composite classes for the AGRI instrument."""
+
+import logging
+
+from satpy.composites import GenericCompositor
+
+LOG = logging.getLogger(__name__)
+
+
+class SimulatedRed(GenericCompositor):
+ """A single-band dataset resembling a Red (0.64 µm) band.
+
+ This compositor creates a single band product by combining two
+ other bands by preset amounts. The general formula with
+ dependencies (d) and fractions (f) is::
+
+ result = (f1 * d1 - f2 * d2) / f3
+
+ See the `fractions` keyword argument for more information.
+ The default setup is to use:
+
+ - f1 = 1.0
+ - f2 = 0.13
+ - f3 = 0.87
+
+ """
+
+ def __init__(self, name, fractions=(1.0, 0.13, 0.87), **kwargs):
+ """Initialize fractions for input channels.
+
+ Args:
+ name (str): Name of this composite
+ fractions (iterable): Fractions of each input band to include in the result.
+
+ """
+ self.fractions = fractions
+ super(SimulatedRed, self).__init__(name, **kwargs)
+
+ def __call__(self, projectables, optional_datasets=None, **attrs):
+ """Generate the single band composite."""
+ c1, c2 = self.match_data_arrays(projectables)
+ res = (c1 * self.fractions[0] - c2 * self.fractions[1]) / self.fractions[2]
+ res.attrs = c1.attrs.copy()
+ return super(SimulatedRed, self).__call__((res,), **attrs)
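
A quick numeric check of the fractions formula from the docstring, with hypothetical band reflectances::

    c1, c2 = 0.50, 0.30                 # hypothetical input reflectances
    f1, f2, f3 = 1.0, 0.13, 0.87
    red = (f1 * c1 - f2 * c2) / f3      # ~0.530
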
diff --git a/satpy/composites/ahi.py b/satpy/composites/ahi.py
index c170eb0543..bb96a94581 100644
--- a/satpy/composites/ahi.py
+++ b/satpy/composites/ahi.py
@@ -1,6 +1,4 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2015-2017 Satpy developers
+# Copyright (c) 2022- Satpy developers
#
# This file is part of satpy.
#
@@ -15,29 +13,8 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Composite classes for the AHI instrument."""
+"""Composite classes for AHI."""
-import logging
-
-from satpy.composites import GenericCompositor
-
-LOG = logging.getLogger(__name__)
-
-
-class GreenCorrector(GenericCompositor):
- """Corrector of the AHI green band to compensate for the deficit of chlorophyll signal."""
-
- def __init__(self, *args, **kwargs):
- """Set default keyword argument values."""
- # XXX: Should this be 0.93 and 0.07
- self.fractions = kwargs.pop('fractions', [0.85, 0.15])
- super(GreenCorrector, self).__init__(*args, **kwargs)
-
- def __call__(self, projectables, optional_datasets=None, **attrs):
- """Boost vegetation effect thanks to NIR (0.8µm) band."""
- green, nir = self.match_data_arrays(projectables)
- LOG.info('Boosting vegetation on green band')
-
- new_green = green * self.fractions[0] + nir * self.fractions[1]
- new_green.attrs = green.attrs.copy()
- return super(GreenCorrector, self).__call__((new_green,), **attrs)
+# The green corrector used to be defined here, but was moved to spectral.py
+# in Satpy 0.38 because it also applies to FCI.
+from .spectral import GreenCorrector # noqa: F401
diff --git a/satpy/composites/cloud_products.py b/satpy/composites/cloud_products.py
index e80c3ad4bc..abc5dcf393 100644
--- a/satpy/composites/cloud_products.py
+++ b/satpy/composites/cloud_products.py
@@ -19,7 +19,7 @@
import numpy as np
-from satpy.composites import GenericCompositor, ColormapCompositor
+from satpy.composites import ColormapCompositor, GenericCompositor
class CloudTopHeightCompositor(ColormapCompositor):
diff --git a/satpy/composites/config_loader.py b/satpy/composites/config_loader.py
index c37df7a2b4..4550ca58b9 100644
--- a/satpy/composites/config_loader.py
+++ b/satpy/composites/config_loader.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2020 Satpy developers
+# Copyright (c) 2020-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -16,18 +16,22 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Classes for loading compositor and modifier configuration files."""
-import os
+from __future__ import annotations
+
import logging
+import os
import warnings
+from functools import lru_cache, update_wrapper
+from typing import Callable, Iterable
import yaml
from yaml import UnsafeLoader
-from satpy import DatasetDict, DataQuery, DataID
-from satpy._config import (get_entry_points_config_dirs, config_search_paths,
- glob_config)
-from satpy.utils import recursive_dict_update
+import satpy
+from satpy import DataID, DataQuery
+from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config
from satpy.dataset.dataid import minimal_default_keys_config
+from satpy.utils import recursive_dict_update
logger = logging.getLogger(__name__)
@@ -166,138 +170,149 @@ def parse_config(self, configured_modifiers, composite_configs):
"'{}'".format(composite_configs))
-class CompositorLoader:
- """Read compositors and modifiers using the configuration files on disk."""
-
- def __init__(self):
- """Initialize the compositor loader."""
- self.modifiers = {}
- self.compositors = {}
- # sensor -> { dict of DataID key information }
- self._sensor_dataid_keys = {}
-
- @classmethod
- def all_composite_sensors(cls):
- """Get all sensor names from available composite configs."""
- paths = get_entry_points_config_dirs('satpy.composites')
- composite_configs = glob_config(
- os.path.join("composites", "*.yaml"),
- search_dirs=paths)
- yaml_names = set([os.path.splitext(os.path.basename(fn))[0]
- for fn in composite_configs])
- non_sensor_yamls = ('visir',)
- sensor_names = [x for x in yaml_names if x not in non_sensor_yamls]
- return sensor_names
-
- def load_sensor_composites(self, sensor_name):
- """Load all compositor configs for the provided sensor."""
- config_filename = sensor_name + ".yaml"
- logger.debug("Looking for composites config file %s", config_filename)
- paths = get_entry_points_config_dirs('satpy.composites')
- composite_configs = config_search_paths(
- os.path.join("composites", config_filename),
- search_dirs=paths, check_exists=True)
- if not composite_configs:
- logger.debug("No composite config found called %s",
- config_filename)
- return
- self._load_config(composite_configs)
-
- def get_compositor(self, key, sensor_names):
- """Get the compositor for *sensor_names*."""
- for sensor_name in sensor_names:
- try:
- return self.compositors[sensor_name][key]
- except KeyError:
- continue
- raise KeyError("Could not find compositor '{}'".format(key))
-
- def get_modifier(self, key, sensor_names):
- """Get the modifier for *sensor_names*."""
- for sensor_name in sensor_names:
- try:
- return self.modifiers[sensor_name][key]
- except KeyError:
- continue
- raise KeyError("Could not find modifier '{}'".format(key))
-
- def load_compositors(self, sensor_names):
- """Load all compositor configs for the provided sensors.
-
- Args:
- sensor_names (list of strings): Sensor names that have matching
- ``sensor_name.yaml`` config files.
-
- Returns:
- (comps, mods): Where `comps` is a dictionary:
-
- sensor_name -> composite ID -> compositor object
-
- And `mods` is a dictionary:
-
- sensor_name -> modifier name -> (modifier class,
- modifiers options)
-
- Note that these dictionaries are copies of those cached in
- this object.
-
- """
- comps = {}
- mods = {}
- for sensor_name in sensor_names:
- if sensor_name not in self.compositors:
- self.load_sensor_composites(sensor_name)
- if sensor_name in self.compositors:
- comps[sensor_name] = DatasetDict(
- self.compositors[sensor_name].copy())
- mods[sensor_name] = self.modifiers[sensor_name].copy()
- return comps, mods
-
- def _get_sensor_id_keys(self, conf, sensor_id, sensor_deps):
- try:
- id_keys = conf['composite_identification_keys']
- except KeyError:
- try:
- id_keys = self._sensor_dataid_keys[sensor_deps[-1]]
- except IndexError:
- id_keys = minimal_default_keys_config
- self._sensor_dataid_keys[sensor_id] = id_keys
- return id_keys
-
- def _load_config(self, composite_configs):
- if not isinstance(composite_configs, (list, tuple)):
- composite_configs = [composite_configs]
-
- conf = {}
- for composite_config in composite_configs:
- with open(composite_config, 'r', encoding='utf-8') as conf_file:
- conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader))
- try:
- sensor_name = conf['sensor_name']
- except KeyError:
- logger.debug('No "sensor_name" tag found in %s, skipping.',
- composite_configs)
- return
+def _load_config(composite_configs):
+ if not isinstance(composite_configs, (list, tuple)):
+ composite_configs = [composite_configs]
+
+ conf = {}
+ for composite_config in composite_configs:
+ with open(composite_config, 'r', encoding='utf-8') as conf_file:
+ conf = recursive_dict_update(conf, yaml.load(conf_file, Loader=UnsafeLoader))
+ try:
+ sensor_name = conf['sensor_name']
+ except KeyError:
+ logger.debug('No "sensor_name" tag found in %s, skipping.',
+ composite_configs)
+ return {}, {}, {}
+
+ sensor_compositors = {}
+ sensor_modifiers = {}
+
+ dep_id_keys = None
+ sensor_deps = sensor_name.split('/')[:-1]
+ if sensor_deps:
+ # get dependent
+ for sensor_dep in sensor_deps:
+ dep_comps, dep_mods, dep_id_keys = load_compositor_configs_for_sensor(sensor_dep)
+ # the last parent should include all of its parents so only add the last one
+ sensor_compositors.update(dep_comps)
+ sensor_modifiers.update(dep_mods)
+
+ id_keys = _get_sensor_id_keys(conf, dep_id_keys)
+ mod_config_helper = _ModifierConfigHelper(sensor_modifiers, id_keys)
+ configured_modifiers = conf.get('modifiers', {})
+ mod_config_helper.parse_config(configured_modifiers, composite_configs)
+
+ comp_config_helper = _CompositeConfigHelper(sensor_compositors, id_keys)
+ configured_composites = conf.get('composites', {})
+ comp_config_helper.parse_config(configured_composites, composite_configs)
+ return sensor_compositors, sensor_modifiers, id_keys
+
+
+def _get_sensor_id_keys(conf, parent_id_keys):
+ try:
+ id_keys = conf['composite_identification_keys']
+ except KeyError:
+ id_keys = parent_id_keys
+ if not id_keys:
+ id_keys = minimal_default_keys_config
+ return id_keys
+
+
+def _lru_cache_with_config_path(func: Callable):
+ """Use lru_cache but include satpy's current config_path."""
+ @lru_cache()
+ def _call_without_config_path_wrapper(sensor_name, _):
+ return func(sensor_name)
- sensor_id = sensor_name.split('/')[-1]
- sensor_deps = sensor_name.split('/')[:-1]
+ def _add_config_path_wrapper(sensor_name: str):
+ config_path = satpy.config.get("config_path")
+ # make sure config_path is hashable, but keep original order since it matters
+ config_path = tuple(config_path)
+ return _call_without_config_path_wrapper(sensor_name, config_path)
- compositors = self.compositors.setdefault(sensor_id, DatasetDict())
- modifiers = self.modifiers.setdefault(sensor_id, {})
+ wrapper = update_wrapper(_add_config_path_wrapper, func)
+ wrapper = _update_cached_wrapper(wrapper, _call_without_config_path_wrapper)
+ return wrapper
- for sensor_dep in reversed(sensor_deps):
- if sensor_dep not in self.compositors or sensor_dep not in self.modifiers:
- self.load_sensor_composites(sensor_dep)
- if sensor_deps:
- compositors.update(self.compositors[sensor_deps[-1]])
- modifiers.update(self.modifiers[sensor_deps[-1]])
+def _update_cached_wrapper(wrapper, cached_func):
+ for meth_name in ("cache_clear", "cache_parameters", "cache_info"):
+ if hasattr(cached_func, meth_name):
+ setattr(wrapper, meth_name, getattr(cached_func, meth_name))
+ return wrapper
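
The pattern above, stand-alone: an ``lru_cache`` whose key also carries the (hashable) config path, so cached results are effectively invalidated when the search path changes. Names here are illustrative, not the patch's exact helpers::

    from functools import lru_cache

    import satpy


    @lru_cache()
    def _load_for_path(sensor_name, config_path):
        ...  # expensive YAML parsing keyed on both arguments


    def load(sensor_name):
        # config_path is a list in satpy's config; a tuple is hashable and
        # preserves order, which matters for search precedence
        return _load_for_path(sensor_name, tuple(satpy.config.get("config_path")))
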
- id_keys = self._get_sensor_id_keys(conf, sensor_id, sensor_deps)
- mod_config_helper = _ModifierConfigHelper(modifiers, id_keys)
- configured_modifiers = conf.get('modifiers', {})
- mod_config_helper.parse_config(configured_modifiers, composite_configs)
- comp_config_helper = _CompositeConfigHelper(compositors, id_keys)
- configured_composites = conf.get('composites', {})
- comp_config_helper.parse_config(configured_composites, composite_configs)
+@_lru_cache_with_config_path
+def load_compositor_configs_for_sensor(sensor_name: str) -> tuple[dict[str, dict], dict[str, dict], dict]:
+ """Load compositor, modifier, and DataID key information from configuration files for the specified sensor.
+
+ Args:
+ sensor_name: Sensor name that has matching ``sensor_name.yaml``
+ config files.
+
+ Returns:
+ (comps, mods, data_id_keys): Where `comps` is a dictionary:
+
+ composite ID -> compositor object
+
+ And `mods` is a dictionary:
+
+ modifier name -> (modifier class, modifiers options)
+
+ And `data_id_keys` is a dictionary:
+
+ DataID key -> key properties
+
+ """
+ config_filename = sensor_name + ".yaml"
+ logger.debug("Looking for composites config file %s", config_filename)
+ paths = get_entry_points_config_dirs('satpy.composites')
+ composite_configs = config_search_paths(
+ os.path.join("composites", config_filename),
+ search_dirs=paths, check_exists=True)
+ if not composite_configs:
+ logger.debug("No composite config found called %s",
+ config_filename)
+ return {}, {}, minimal_default_keys_config
+ return _load_config(composite_configs)
+
+
+def load_compositor_configs_for_sensors(sensor_names: Iterable[str]) -> tuple[dict[str, dict], dict[str, dict]]:
+ """Load compositor and modifier configuration files for the specified sensors.
+
+ Args:
+ sensor_names (list of strings): Sensor names that have matching
+ ``sensor_name.yaml`` config files.
+
+ Returns:
+ (comps, mods): Where `comps` is a dictionary:
+
+ sensor_name -> composite ID -> compositor object
+
+ And `mods` is a dictionary:
+
+ sensor_name -> modifier name -> (modifier class,
+ modifiers options)
+
+ """
+ comps = {}
+ mods = {}
+ for sensor_name in sensor_names:
+ sensor_comps, sensor_mods = load_compositor_configs_for_sensor(sensor_name)[:2]
+ comps[sensor_name] = sensor_comps
+ mods[sensor_name] = sensor_mods
+ return comps, mods
+
+
+def all_composite_sensors():
+ """Get all sensor names from available composite configs."""
+ paths = get_entry_points_config_dirs('satpy.composites')
+ composite_configs = glob_config(
+ os.path.join("composites", "*.yaml"),
+ search_dirs=paths)
+ yaml_names = set([os.path.splitext(os.path.basename(fn))[0]
+ for fn in composite_configs])
+ non_sensor_yamls = ('visir',)
+ sensor_names = [x for x in yaml_names if x not in non_sensor_yamls]
+ return sensor_names
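
A sketch of using the new module-level API that replaces the removed ``CompositorLoader`` class (output shapes follow the docstrings above; the sensor name is illustrative)::

    from satpy.composites.config_loader import (
        all_composite_sensors,
        load_compositor_configs_for_sensors,
    )

    print(sorted(all_composite_sensors())[:3])
    comps, mods = load_compositor_configs_for_sensors(["abi"])
    print(len(comps["abi"]), "ABI compositors loaded")
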
diff --git a/satpy/composites/crefl_utils.py b/satpy/composites/crefl_utils.py
deleted file mode 100644
index 6978a0ad42..0000000000
--- a/satpy/composites/crefl_utils.py
+++ /dev/null
@@ -1,464 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2010-2018 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Shared utilities for correcting reflectance data using the 'crefl' algorithm.
-
-Original code written by Ralph Kuehn with modifications by David Hoese and Martin Raspaud.
-Ralph's code was originally based on the C crefl code distributed for VIIRS and MODIS.
-"""
-import logging
-
-import numpy as np
-import xarray as xr
-import dask.array as da
-
-LOG = logging.getLogger(__name__)
-
-bUseV171 = False
-
-if bUseV171:
- UO3 = 0.319
- UH2O = 2.93
-else:
- UO3 = 0.285
- UH2O = 2.93
-
-MAXSOLZ = 86.5
-MAXAIRMASS = 18
-SCALEHEIGHT = 8000
-FILL_INT16 = 32767
-TAUSTEP4SPHALB_ABI = .0003
-TAUSTEP4SPHALB = .0001
-
-MAXNUMSPHALBVALUES = 4000 # with no aerosol taur <= 0.4 in all bands everywhere
-REFLMIN = -0.01
-REFLMAX = 1.6
-
-
-def _csalbr(tau):
- # Previously 3 functions csalbr fintexp1, fintexp3
- a = [-.57721566, 0.99999193, -0.24991055, 0.05519968, -0.00976004,
- 0.00107857]
- # xx = a[0] + a[1] * tau + a[2] * tau**2 + a[3] * tau**3 + a[4] * tau**4 + a[5] * tau**5
- # xx = np.polyval(a[::-1], tau)
-
- # xx = a[0]
- # xftau = 1.0
- # for i in xrange(5):
- # xftau = xftau*tau
- # xx = xx + a[i] * xftau
- fintexp1 = np.polyval(a[::-1], tau) - np.log(tau)
- fintexp3 = (np.exp(-tau) * (1.0 - tau) + tau**2 * fintexp1) / 2.0
-
- return (3.0 * tau - fintexp3 *
- (4.0 + 2.0 * tau) + 2.0 * np.exp(-tau)) / (4.0 + 3.0 * tau)
-
-
-# From crefl.1.7.1
-if bUseV171:
- aH2O = np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0,
- 0, 0, 0, 0, 0, 0, 0, 0])
- bH2O = np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342, 0,
- 0, 0, 0, 0, 0, 0, 0, 0])
- # const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0,
- # 0, 0, 0.00244, 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119,
- # 0.00263};*/
- aO3 = np.array(
- [0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225,
- 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263])
- # const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948,
- # 0.0036, 0.0012, 0.0004, 0.3109, 0.2375, 0.1596, 0.1131, 0.0994,
- # 0.0446, 0.0416, 0.0286, 0.0155};*/
- taur0 = np.array(
- [0.05100, 0.01631, 0.19325, 0.09536, 0.00366, 0.00123, 0.00043, 0.3139,
- 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155])
-else:
- # From polar2grid cviirs.c
- # This number is what Ralph says "looks good"
- rg_fudge = .55
- aH2O = np.array(
- [0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252,
- 0.000996563, 0.00222253, 0.00094005, 0.000563288, 0, 0, 0, 0, 0, 0,
- 2.4111e-003, 7.8454e-003*rg_fudge, 7.9258e-3, 9.3392e-003, 2.53e-2])
- bH2O = np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958,
- 0.78812, 0.791204, 0.900564, 0.942907, 0, 0, 0, 0, 0, 0,
- # These are actually aO2 values for abi calculations
- 1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2])
- # /*const float aO3[Nbands]={ 0.0711, 0.00313, 0.0104, 0.0930, 0, 0, 0, 0.00244,
- # 0.00383, 0.0225, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263};*/
- aO3 = np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531,
- 0, 0, 0.0663, 0.0836, 0.0485, 0.0395, 0.0119, 0.00263,
- 4.2869e-003, 25.6509e-003*rg_fudge, 802.4319e-006, 0.0000e+000, 2e-5])
- # /*const float taur0[Nbands] = { 0.0507, 0.0164, 0.1915, 0.0948, 0.0036, 0.0012, 0.0004,
- # 0.3109, 0.2375, 0.1596, 0.1131, 0.0994, 0.0446, 0.0416, 0.0286, 0.0155};*/
- taur0 = np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132,
- 0.00033, 0.05373, 0.01561, 0.00129, 0.1131, 0.0994,
- 0.0446, 0.0416, 0.0286, 0.0155,
- 184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 311.2900e-006])
- # add last 5 from bH2O to aO2
- aO2 = 0
-
-# Map of pixel resolutions -> wavelength -> coefficient index
-# Map of pixel resolutions -> band name -> coefficient index
-# Index is used in aH2O, bH2O, aO3, and taur0 arrays above
-MODIS_COEFF_INDEX_MAP = {
- 1000: {
- (0.620, 0.6450, 0.670): 0,
- "1": 0,
- (0.841, 0.8585, 0.876): 1,
- "2": 1,
- (0.459, 0.4690, 0.479): 2,
- "3": 2,
- (0.545, 0.5550, 0.565): 3,
- "4": 3,
- (1.230, 1.2400, 1.250): 4,
- "5": 4,
- (1.628, 1.6400, 1.652): 5,
- "6": 5,
- (2.105, 2.1300, 2.155): 6,
- "7": 6,
- }
-}
-MODIS_COEFF_INDEX_MAP[500] = MODIS_COEFF_INDEX_MAP[1000]
-MODIS_COEFF_INDEX_MAP[250] = MODIS_COEFF_INDEX_MAP[1000]
-
-# resolution -> wavelength -> coefficient index
-# resolution -> band name -> coefficient index
-VIIRS_COEFF_INDEX_MAP = {
- 1000: {
- (0.662, 0.6720, 0.682): 0, # M05
- "M05": 0,
- (0.846, 0.8650, 0.885): 1, # M07
- "M07": 1,
- (0.478, 0.4880, 0.498): 2, # M03
- "M03": 2,
- (0.545, 0.5550, 0.565): 3, # M04
- "M04": 3,
- (1.230, 1.2400, 1.250): 4, # M08
- "M08": 4,
- (1.580, 1.6100, 1.640): 5, # M10
- "M10": 5,
- (2.225, 2.2500, 2.275): 6, # M11
- "M11": 6,
- },
- 500: {
- (0.600, 0.6400, 0.680): 7, # I01
- "I01": 7,
- (0.845, 0.8650, 0.884): 8, # I02
- "I02": 8,
- (1.580, 1.6100, 1.640): 9, # I03
- "I03": 9,
- },
-}
-
-
-# resolution -> wavelength -> coefficient index
-# resolution -> band name -> coefficient index
-ABI_COEFF_INDEX_MAP = {
- 2000: {
- (0.450, 0.470, 0.490): 16, # C01
- "C01": 16,
- (0.590, 0.640, 0.690): 17, # C02
- "C02": 17,
- (0.8455, 0.865, 0.8845): 18, # C03
- "C03": 18,
- # (1.3705, 1.378, 1.3855): None, # C04
- # "C04": None,
- (1.580, 1.610, 1.640): 19, # C05
- "C05": 19,
- (2.225, 2.250, 2.275): 20, # C06
- "C06": 20
- },
-}
-
-
-COEFF_INDEX_MAP = {
- "viirs": VIIRS_COEFF_INDEX_MAP,
- "modis": MODIS_COEFF_INDEX_MAP,
- "abi": ABI_COEFF_INDEX_MAP,
-}
-
-
-def find_coefficient_index(sensor, wavelength_range, resolution=0):
- """Return index in to coefficient arrays for this band's wavelength.
-
- This function search through the `COEFF_INDEX_MAP` dictionary and
- finds the first key where the nominal wavelength of `wavelength_range`
- falls between the minimum wavelength and maximum wavelength of the key.
- `wavelength_range` can also be the standard name of the band. For
- example, "M05" for VIIRS or "1" for MODIS.
-
- :param sensor: sensor of band to be corrected
- :param wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength)
- :param resolution: resolution of the band to be corrected
- :return: index in to coefficient arrays like `aH2O`, `aO3`, etc.
- None is returned if no matching wavelength is found
- """
- index_map = COEFF_INDEX_MAP[sensor.lower()]
- # Find the best resolution of coefficients
- for res in sorted(index_map.keys()):
- if resolution <= res:
- index_map = index_map[res]
- break
- else:
- raise ValueError("Unrecognized data resolution: {}", resolution)
- # Find the best wavelength of coefficients
- if isinstance(wavelength_range, str):
- # wavelength range is actually a band name
- return index_map[wavelength_range]
- else:
- for k, v in index_map.items():
- if isinstance(k, str):
- # we are analyzing wavelengths and ignoring dataset names
- continue
- if k[0] <= wavelength_range[1] <= k[2]:
- return v
-
-
-def get_coefficients(sensor, wavelength_range, resolution=0):
- """Get coefficients used in CREFL correction.
-
- Args:
- sensor: sensor of the band to be corrected
- wavelength_range: 3-element tuple of (min wavelength, nominal wavelength, max wavelength)
- resolution: resolution of the band to be corrected
-
- Returns:
- aH2O, bH2O, aO3, taur0 coefficient values
-
- """
- idx = find_coefficient_index(sensor,
- wavelength_range,
- resolution=resolution)
- return aH2O[idx], bH2O[idx], aO3[idx], taur0[idx]
-
-
-def _chand(phi, muv, mus, taur):
- # FROM FUNCTION CHAND
- # phi: azimuthal difference between sun and observation in degree
- # (phi=0 in backscattering direction)
- # mus: cosine of the sun zenith angle
- # muv: cosine of the observation zenith angle
- # taur: molecular optical depth
- # rhoray: molecular path reflectance
- # constant xdep: depolarization factor (0.0279)
- # xfd = (1-xdep/(2-xdep)) / (1 + 2*xdep/(2-xdep)) = 2 * (1 - xdep) / (2 + xdep) = 0.958725775
- # */
- xfd = 0.958725775
- xbeta2 = 0.5
- # float pl[5];
- # double fs01, fs02, fs0, fs1, fs2;
- as0 = [0.33243832, 0.16285370, -0.30924818, -0.10324388, 0.11493334,
- -6.777104e-02, 1.577425e-03, -1.240906e-02, 3.241678e-02,
- -3.503695e-02]
- as1 = [0.19666292, -5.439061e-02]
- as2 = [0.14545937, -2.910845e-02]
- # float phios, xcos1, xcos2, xcos3;
- # float xph1, xph2, xph3, xitm1, xitm2;
- # float xlntaur, xitot1, xitot2, xitot3;
- # int i, ib;
-
- xph1 = 1.0 + (3.0 * mus * mus - 1.0) * (3.0 * muv * muv - 1.0) * xfd / 8.0
- xph2 = -xfd * xbeta2 * 1.5 * mus * muv * da.sqrt(
- 1.0 - mus * mus) * da.sqrt(1.0 - muv * muv)
- xph3 = xfd * xbeta2 * 0.375 * (1.0 - mus * mus) * (1.0 - muv * muv)
-
- # pl[0] = 1.0
- # pl[1] = mus + muv
- # pl[2] = mus * muv
- # pl[3] = mus * mus + muv * muv
- # pl[4] = mus * mus * muv * muv
-
- fs01 = as0[0] + (mus + muv) * as0[1] + (mus * muv) * as0[2] + (
- mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4]
- fs02 = as0[5] + (mus + muv) * as0[6] + (mus * muv) * as0[7] + (
- mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9]
- # for (i = 0; i < 5; i++) {
- # fs01 += (double) (pl[i] * as0[i]);
- # fs02 += (double) (pl[i] * as0[5 + i]);
- # }
-
- # for refl, (ah2o, bh2o, ao3, tau) in zip(reflectance_bands, coefficients):
-
- # ib = find_coefficient_index(center_wl)
- # if ib is None:
- # raise ValueError("Can't handle band with wavelength '{}'".format(center_wl))
-
- xlntaur = da.log(taur)
-
- fs0 = fs01 + fs02 * xlntaur
- fs1 = as1[0] + xlntaur * as1[1]
- fs2 = as2[0] + xlntaur * as2[1]
- del xlntaur, fs01, fs02
-
- trdown = da.exp(-taur / mus)
- trup = da.exp(-taur / muv)
-
- xitm1 = (1.0 - trdown * trup) / 4.0 / (mus + muv)
- xitm2 = (1.0 - trdown) * (1.0 - trup)
- xitot1 = xph1 * (xitm1 + xitm2 * fs0)
- xitot2 = xph2 * (xitm1 + xitm2 * fs1)
- xitot3 = xph3 * (xitm1 + xitm2 * fs2)
- del xph1, xph2, xph3, xitm1, xitm2, fs0, fs1, fs2
-
- phios = da.deg2rad(phi + 180.0)
- xcos1 = 1.0
- xcos2 = da.cos(phios)
- xcos3 = da.cos(2.0 * phios)
- del phios
-
- rhoray = xitot1 * xcos1 + xitot2 * xcos2 * 2.0 + xitot3 * xcos3 * 2.0
- return rhoray, trdown, trup
-
-
-def _sphalb_index(index_arr, sphalb0):
- # FIXME: if/when dask can support lazy index arrays then remove this
- return sphalb0[index_arr]
-
-
-def _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, taustep4sphalb, tO2=1.0):
- tau_step = da.linspace(taustep4sphalb, MAXNUMSPHALBVALUES * taustep4sphalb, MAXNUMSPHALBVALUES,
- chunks=int(MAXNUMSPHALBVALUES / 2))
- sphalb0 = _csalbr(tau_step)
- taur = tau * da.exp(-height / SCALEHEIGHT)
- rhoray, trdown, trup = _chand(phi, muv, mus, taur)
- if isinstance(height, xr.DataArray):
- sphalb = da.map_blocks(_sphalb_index, (taur / taustep4sphalb + 0.5).astype(np.int32).data, sphalb0.compute(),
- dtype=sphalb0.dtype)
- else:
- sphalb = sphalb0[(taur / taustep4sphalb + 0.5).astype(np.int32)]
- Ttotrayu = ((2 / 3. + muv) + (2 / 3. - muv) * trup) / (4 / 3. + taur)
- Ttotrayd = ((2 / 3. + mus) + (2 / 3. - mus) * trdown) / (4 / 3. + taur)
- TtotraytH2O = Ttotrayu * Ttotrayd * tH2O
- tOG = tO3 * tO2
- return sphalb, rhoray, TtotraytH2O, tOG
-
-
-def get_atm_variables(mus, muv, phi, height, ah2o, bh2o, ao3, tau):
- """Get atmospheric variables for non-ABI instruments."""
- air_mass = 1.0 / mus + 1 / muv
- air_mass = air_mass.where(air_mass <= MAXAIRMASS, -1.0)
- tO3 = 1.0
- tH2O = 1.0
- if ao3 != 0:
- tO3 = da.exp(-air_mass * UO3 * ao3)
- if bh2o != 0:
- if bUseV171:
- tH2O = da.exp(-da.exp(ah2o + bh2o * da.log(air_mass * UH2O)))
- else:
- tH2O = da.exp(-(ah2o * ((air_mass * UH2O) ** bh2o)))
- # Returns sphalb, rhoray, TtotraytH2O, tOG
- return _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB)
-
-
-def get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, ah2o, ao2, ao3, tau):
- """Get atmospheric variables for ABI."""
- tO3 = 1.0
- tH2O = 1.0
- if ao3 != 0:
- tO3 = da.exp(-G_O3 * ao3)
- if ah2o != 0:
- tH2O = da.exp(-G_H2O * ah2o)
- tO2 = da.exp(-G_O2 * ao2)
- # Returns sphalb, rhoray, TtotraytH2O, tOG.
- return _atm_variables_finder(mus, muv, phi, height, tau, tO3, tH2O, TAUSTEP4SPHALB_ABI, tO2=tO2)
-
-
-def _G_calc(zenith, a_coeff):
- return (da.cos(da.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1
-
-
-def _avg_elevation_index(avg_elevation, row, col):
- return avg_elevation[row, col]
-
-
-def run_crefl(refl, coeffs,
- lon,
- lat,
- sensor_azimuth,
- sensor_zenith,
- solar_azimuth,
- solar_zenith,
- avg_elevation=None,
- percent=False,
- use_abi=False):
- """Run main crefl algorithm.
-
- All input parameters are per-pixel values meaning they are the same size
- and shape as the input reflectance data, unless otherwise stated.
-
- :param reflectance_bands: tuple of reflectance band arrays
- :param coefficients: tuple of coefficients for each band (see `get_coefficients`)
- :param lon: input swath longitude array
- :param lat: input swath latitude array
- :param sensor_azimuth: input swath sensor azimuth angle array
- :param sensor_zenith: input swath sensor zenith angle array
- :param solar_azimuth: input swath solar azimuth angle array
- :param solar_zenith: input swath solar zenith angle array
- :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf)
- :param percent: True if input reflectances are on a 0-100 scale instead of 0-1 scale (default: False)
-
- """
- # FUTURE: Find a way to compute the average elevation before hand
- # Get digital elevation map data for our granule, set ocean fill value to 0
- if avg_elevation is None:
- LOG.debug("No average elevation information provided in CREFL")
- # height = np.zeros(lon.shape, dtype=np.float64)
- height = 0.
- else:
- LOG.debug("Using average elevation information provided to CREFL")
- lat[(lat <= -90) | (lat >= 90)] = np.nan
- lon[(lon <= -180) | (lon >= 180)] = np.nan
- row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32)
- col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32)
- space_mask = da.isnull(lon) | da.isnull(lat)
- row[space_mask] = 0
- col[space_mask] = 0
-
- height = da.map_blocks(_avg_elevation_index, avg_elevation, row, col, dtype=avg_elevation.dtype)
- height = xr.DataArray(height, dims=['y', 'x'])
- # negative heights aren't allowed, clip to 0
- height = height.where((height >= 0.) & ~space_mask, 0.0)
- del lat, lon, row, col
- mus = da.cos(da.deg2rad(solar_zenith))
- mus = mus.where(mus >= 0)
- muv = da.cos(da.deg2rad(sensor_zenith))
- phi = solar_azimuth - sensor_azimuth
-
- if use_abi:
- LOG.debug("Using ABI CREFL algorithm")
- a_O3 = [268.45, 0.5, 115.42, -3.2922]
- a_H2O = [0.0311, 0.1, 92.471, -1.3814]
- a_O2 = [0.4567, 0.007, 96.4884, -1.6970]
- G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3)
- G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O)
- G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2)
- # Note: bh2o values are actually ao2 values for abi
- sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(mus, muv, phi, height, G_O3, G_H2O, G_O2, *coeffs)
- else:
- LOG.debug("Using original VIIRS CREFL algorithm")
- sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables(mus, muv, phi, height, *coeffs)
-
- del solar_azimuth, solar_zenith, sensor_zenith, sensor_azimuth
- # Note: Assume that fill/invalid values are either NaN or we are dealing
- # with masked arrays
- if percent:
- corr_refl = ((refl / 100.) / tOG - rhoray) / TtotraytH2O
- else:
- corr_refl = (refl / tOG - rhoray) / TtotraytH2O
- corr_refl /= (1.0 + corr_refl * sphalb)
- return corr_refl.clip(REFLMIN, REFLMAX)
diff --git a/satpy/composites/glm.py b/satpy/composites/glm.py
index 9d729df7fa..48fe6b922c 100644
--- a/satpy/composites/glm.py
+++ b/satpy/composites/glm.py
@@ -20,6 +20,7 @@
import logging
import xarray as xr
+
from satpy.composites import GenericCompositor
from satpy.writers import get_enhanced_image
diff --git a/satpy/composites/sar.py b/satpy/composites/sar.py
index 94b7310451..0e064ae886 100644
--- a/satpy/composites/sar.py
+++ b/satpy/composites/sar.py
@@ -20,6 +20,7 @@
import logging
import numpy as np
+
from satpy.composites import GenericCompositor
from satpy.dataset import combine_metadata
diff --git a/satpy/composites/spectral.py b/satpy/composites/spectral.py
new file mode 100644
index 0000000000..d0e6dc9330
--- /dev/null
+++ b/satpy/composites/spectral.py
@@ -0,0 +1,70 @@
+# Copyright (c) 2015-2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Composite classes for spectral adjustments."""
+
+import logging
+
+from satpy.composites import GenericCompositor
+from satpy.dataset import combine_metadata
+
+LOG = logging.getLogger(__name__)
+
+
+class GreenCorrector(GenericCompositor):
+ """Corrector of the FCI or AHI green band.
+
+ The green band in FCI and AHI deliberately misses the chlorophyll peak
+ in order to focus on aerosol and ash rather than on vegetation. This
+ affects true colour RGBs, because vegetation looks brown rather than green.
+ To make vegetation look greener again, this corrector simulates the
+ green band as a weighted combination of two or more other channels.
+
+ The composite takes two or more input channels and a parameter
+ ``fractions``, a list of floats with the same length as the number of
+ channels.
+
+ For example, to simulate an FCI corrected green composite, one could use
+ a combination of 93% from the green band (vis_05) and 7% from the
+ near-infrared 0.8 µm band (vis_08)::
+
+ corrected_green:
+ compositor: !!python/name:satpy.composites.ahi.GreenCorrector
+ fractions: [0.93, 0.07]
+ prerequisites:
+ - name: vis_05
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: vis_08
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: toa_bidirectional_reflectance
+
+ Other examples can be found in the ``fci.yaml`` and ``ahi.yaml`` composite
+ files in the satpy distribution.
+ """
+
+ def __init__(self, *args, fractions=(0.85, 0.15), **kwargs):
+ """Set default keyword argument values."""
+ # XXX: Should this be 0.93 and 0.07?
+ self.fractions = fractions
+ super(GreenCorrector, self).__init__(*args, **kwargs)
+
+ def __call__(self, projectables, optional_datasets=None, **attrs):
+ """Boost vegetation effect thanks to NIR (0.8µm) band."""
+ LOG.info('Boosting vegetation on green band')
+
+ projectables = self.match_data_arrays(projectables)
+ new_green = sum(fraction * value for fraction, value in zip(self.fractions, projectables))
+ new_green.attrs = combine_metadata(*projectables)
+ return super(GreenCorrector, self).__call__((new_green,), **attrs)
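
A numeric check of the weighted sum, using the FCI fractions from the docstring and hypothetical reflectances::

    green, nir = 0.20, 0.45
    fractions = (0.93, 0.07)
    new_green = sum(f * v for f, v in zip(fractions, (green, nir)))  # 0.2175
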
diff --git a/satpy/composites/viirs.py b/satpy/composites/viirs.py
index 576e27d16a..2c6587570c 100644
--- a/satpy/composites/viirs.py
+++ b/satpy/composites/viirs.py
@@ -19,9 +19,9 @@
import logging
-import numpy as np
import dask
import dask.array as da
+import numpy as np
import xarray as xr
from satpy.composites import CompositeBase, GenericCompositor
@@ -268,7 +268,7 @@ def _saturation_correction(self, dnb_data, unit_factor, min_val,
max_val)) / dnb_data.size
LOG.debug("Dynamic DNB saturation percentage: %f", saturation_pct)
while saturation_pct > 0.005:
- max_val *= 1.1 * unit_factor
+ max_val *= 1.1
saturation_pct = float(np.count_nonzero(
dnb_data > max_val)) / dnb_data.size
LOG.debug("Dynamic DNB saturation percentage: %f",
@@ -340,7 +340,7 @@ def __call__(self, datasets, **info):
else:
inner_sqrt = (output_dataset - min_val) / (max_val - min_val)
# clip negative values to 0 before the sqrt
- inner_sqrt = inner_sqrt.where(inner_sqrt > 0, 0)
+ inner_sqrt.data = np.clip(inner_sqrt.data, 0, None)
output_dataset.data = np.sqrt(inner_sqrt).data
info = dnb_data.attrs.copy()
@@ -480,8 +480,7 @@ def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, n
Returns: The equalized data
"""
out = out if out is not None else np.zeros_like(data)
- # if we don't have a valid mask, use the mask of what we should be
- # equalizing
+ # if we don't have a valid mask, use the mask of what we should be equalizing
if valid_data_mask is None:
valid_data_mask = mask_to_equalize
@@ -495,77 +494,20 @@ def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, n
(total_cols % tile_size) == 0) else int(total_cols / tile_size) + 1
# an array of our distribution functions for equalization
- all_cumulative_dist_functions = [[]]
+ all_cumulative_dist_functions = [[] for _ in range(row_tiles)]
# an array of our bin information for equalization
- all_bin_information = [[]]
+ all_bin_information = [[] for _ in range(row_tiles)]
- # loop through our tiles and create the histogram equalizations for each
- # one
+ # loop through our tiles and create the histogram equalizations for each one
for num_row_tile in range(row_tiles):
-
- # make sure we have enough rows available to store info on this next
- # row of tiles
- if len(all_cumulative_dist_functions) <= num_row_tile:
- all_cumulative_dist_functions.append([])
- if len(all_bin_information) <= num_row_tile:
- all_bin_information.append([])
-
- # go through each tile in this row and calculate the equalization
for num_col_tile in range(col_tiles):
-
- # calculate the range for this tile (min is inclusive, max is
- # exclusive)
- min_row = num_row_tile * tile_size
- max_row = min_row + tile_size
- min_col = num_col_tile * tile_size
- max_col = min_col + tile_size
-
- # for speed of calculation, pull out the mask of pixels that should
- # be used to calculate the histogram
- mask_valid_data_in_tile = valid_data_mask[min_row:max_row, min_col:
- max_col]
-
- # if we have any valid data in this tile, calculate a histogram equalization for this tile
- # (note: even if this tile does no fall in the mask_to_equalize, it's histogram may be used by other tiles)
- cumulative_dist_function, temp_bins = None, None
- if mask_valid_data_in_tile.any():
-
- # use all valid data in the tile, so separate sections will
- # blend cleanly
- temp_valid_data = data[min_row:max_row, min_col:max_col][
- mask_valid_data_in_tile]
- temp_valid_data = temp_valid_data[
- temp_valid_data >= 0
- ] # TEMP, testing to see if negative data is messing everything up
- # limit the contrast by only considering data within a certain
- # range of the average
- if std_mult_cutoff is not None:
- avg = np.mean(temp_valid_data)
- std = np.std(temp_valid_data)
- # limit our range to avg +/- std_mult_cutoff*std; e.g. the
- # default std_mult_cutoff is 4.0 so about 99.8% of the data
- concervative_mask = (
- temp_valid_data < (avg + std * std_mult_cutoff)) & (
- temp_valid_data > (avg - std * std_mult_cutoff))
- temp_valid_data = temp_valid_data[concervative_mask]
-
- # if we are taking the log of our data, do so now
- if do_log_scale:
- temp_valid_data = np.log(temp_valid_data + log_offset)
-
- # do the histogram equalization and get the resulting
- # distribution function and bin information
- if temp_valid_data.size > 0:
- cumulative_dist_function, temp_bins = _histogram_equalization_helper(
- temp_valid_data,
- number_of_bins,
- clip_limit=clip_limit,
- slope_limit=slope_limit)
-
- # hang on to our equalization related information for use later
- all_cumulative_dist_functions[num_row_tile].append(
- cumulative_dist_function)
- all_bin_information[num_row_tile].append(temp_bins)
+ tile_dist_func, tile_bin_info = _histogram_equalize_one_tile(
+ data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset,
+ clip_limit, slope_limit, number_of_bins, num_row_tile, num_col_tile,
+ tile_size
+ )
+ all_cumulative_dist_functions[num_row_tile].append(tile_dist_func)
+ all_bin_information[num_row_tile].append(tile_bin_info)
# get the tile weight array so we can use it to interpolate our data
tile_weights = _calculate_weights(tile_size)
@@ -574,99 +516,11 @@ def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, n
# versions of the data
for num_row_tile in range(row_tiles):
for num_col_tile in range(col_tiles):
-
- # calculate the range for this tile (min is inclusive, max is
- # exclusive)
- min_row = num_row_tile * tile_size
- max_row = min_row + tile_size
- min_col = num_col_tile * tile_size
- max_col = min_col + tile_size
-
- # for convenience, pull some of these tile sized chunks out
- temp_all_data = data[min_row:max_row, min_col:max_col].copy()
- temp_mask_to_equalize = mask_to_equalize[min_row:max_row, min_col:
- max_col]
- temp_all_valid_data_mask = valid_data_mask[min_row:max_row,
- min_col:max_col]
-
- # if we have any data in this tile, calculate our weighted sum
- if temp_mask_to_equalize.any():
- if do_log_scale:
- temp_all_data[temp_all_valid_data_mask] = np.log(
- temp_all_data[temp_all_valid_data_mask] + log_offset)
- temp_data_to_equalize = temp_all_data[temp_mask_to_equalize]
- temp_all_valid_data = temp_all_data[temp_all_valid_data_mask]
-
- # a place to hold our weighted sum that represents the interpolated contributions
- # of the histogram equalizations from the surrounding tiles
- temp_sum = np.zeros_like(temp_data_to_equalize)
-
- # how much weight were we unable to use because those tiles
- # fell off the edge of the image?
- unused_weight = np.zeros(temp_data_to_equalize.shape,
- dtype=tile_weights.dtype)
-
- # loop through all the surrounding tiles and process their
- # contributions to this tile
- for weight_row in range(3):
- for weight_col in range(3):
- # figure out which adjacent tile we're processing (in
- # overall tile coordinates instead of relative to our
- # current tile)
- calculated_row = num_row_tile - 1 + weight_row
- calculated_col = num_col_tile - 1 + weight_col
- tmp_tile_weights = tile_weights[
- weight_row, weight_col][np.where(
- temp_mask_to_equalize)]
-
- # if we're inside the tile array and the tile we're
- # processing has a histogram equalization for us to
- # use, process it
- if ((calculated_row >= 0) and
- (calculated_row < row_tiles) and
- (calculated_col >= 0) and
- (calculated_col < col_tiles) and (
- all_bin_information[calculated_row][
- calculated_col] is not None) and
- (all_cumulative_dist_functions[calculated_row][
- calculated_col] is not None)):
-
- # equalize our current tile using the histogram
- # equalization from the tile we're processing
- temp_equalized_data = np.interp(
- temp_all_valid_data, all_bin_information[
- calculated_row][calculated_col][:-1],
- all_cumulative_dist_functions[calculated_row][
- calculated_col])
- temp_equalized_data = temp_equalized_data[np.where(
- temp_mask_to_equalize[
- temp_all_valid_data_mask])]
-
- # add the contribution for the tile we're
- # processing to our weighted sum
- temp_sum += (temp_equalized_data *
- tmp_tile_weights)
-
- # if the tile we're processing doesn't exist, hang onto the weight we
- # would have used for it so we can correct that later
- else:
- unused_weight -= tmp_tile_weights
-
- # if we have unused weights, scale our values to correct for
- # that
- if unused_weight.any():
- # TODO, if the mask masks everything out this will be a
- # zero!
- temp_sum /= unused_weight + 1
-
- # now that we've calculated the weighted sum for this tile, set
- # it in our data array
- out[min_row:max_row, min_col:max_col][
- temp_mask_to_equalize] = temp_sum
- # TEMP, test without using weights
- # data[min_row:max_row, min_col:max_col][temp_mask_to_equalize] = \
- # np.interp(temp_data_to_equalize, all_bin_information[num_row_tile][num_col_tile][:-1],
- # all_cumulative_dist_functions[num_row_tile][num_col_tile])
+ _interpolate_local_equalized_tiles(
+ data, out, mask_to_equalize, valid_data_mask, do_log_scale, log_offset,
+ tile_weights, all_bin_information, all_cumulative_dist_functions,
+ num_row_tile, num_col_tile, row_tiles, col_tiles, tile_size,
+ )
# if we were asked to, normalize our data to be between zero and one,
# rather than zero and number_of_bins
@@ -676,6 +530,155 @@ def local_histogram_equalization(data, mask_to_equalize, valid_data_mask=None, n
return out
+def _histogram_equalize_one_tile(
+ data, valid_data_mask, std_mult_cutoff, do_log_scale, log_offset,
+ clip_limit, slope_limit, number_of_bins, num_row_tile, num_col_tile,
+ tile_size):
+ # calculate the range for this tile (min is inclusive, max is
+ # exclusive)
+ min_row = num_row_tile * tile_size
+ max_row = min_row + tile_size
+ min_col = num_col_tile * tile_size
+ max_col = min_col + tile_size
+
+ # for speed of calculation, pull out the mask of pixels that should
+ # be used to calculate the histogram
+ mask_valid_data_in_tile = valid_data_mask[min_row:max_row, min_col:max_col]
+
+ # if there is no valid data in this tile, skip it; otherwise calculate a histogram equalization
+ # (note: even if this tile does not fall in the mask_to_equalize, its histogram may be used by other tiles)
+ if not mask_valid_data_in_tile.any():
+ return None, None
+
+ # use all valid data in the tile, so separate sections will
+ # blend cleanly
+ temp_valid_data = data[min_row:max_row, min_col:max_col][
+ mask_valid_data_in_tile]
+ temp_valid_data = temp_valid_data[
+ temp_valid_data >= 0
+ ] # TEMP, testing to see if negative data is messing everything up
+ # limit the contrast by only considering data within a certain
+ # range of the average
+ if std_mult_cutoff is not None:
+ avg = np.mean(temp_valid_data)
+ std = np.std(temp_valid_data)
+ # limit our range to avg +/- std_mult_cutoff*std; e.g. the
+ # default std_mult_cutoff is 4.0, keeping nearly all (~99.99%) of the data
+ conservative_mask = (
+ temp_valid_data < (avg + std * std_mult_cutoff)) & (
+ temp_valid_data > (avg - std * std_mult_cutoff))
+ temp_valid_data = temp_valid_data[conservative_mask]
+
+ # if we are taking the log of our data, do so now
+ if do_log_scale:
+ temp_valid_data = np.log(temp_valid_data + log_offset)
+
+ # do the histogram equalization and get the resulting
+ # distribution function and bin information
+ if not temp_valid_data.size:
+ return None, None
+
+ cumulative_dist_function, temp_bins = _histogram_equalization_helper(
+ temp_valid_data,
+ number_of_bins,
+ clip_limit=clip_limit,
+ slope_limit=slope_limit)
+ return cumulative_dist_function, temp_bins
+
+
+def _interpolate_local_equalized_tiles(
+ data, out, mask_to_equalize, valid_data_mask, do_log_scale, log_offset,
+ tile_weights, all_bin_information, all_cumulative_dist_functions,
+ num_row_tile, num_col_tile, row_tiles, col_tiles, tile_size):
+ # calculate the range for this tile (min is inclusive, max is
+ # exclusive)
+ min_row = num_row_tile * tile_size
+ max_row = min_row + tile_size
+ min_col = num_col_tile * tile_size
+ max_col = min_col + tile_size
+
+ # for convenience, pull some of these tile sized chunks out
+ temp_all_data = data[min_row:max_row, min_col:max_col].copy()
+ temp_mask_to_equalize = mask_to_equalize[min_row:max_row, min_col:max_col]
+ temp_all_valid_data_mask = valid_data_mask[min_row:max_row, min_col:max_col]
+
+ # if there is no data to equalize in this tile, there is nothing to do
+ if not temp_mask_to_equalize.any():
+ return
+
+ if do_log_scale:
+ temp_all_data[temp_all_valid_data_mask] = np.log(
+ temp_all_data[temp_all_valid_data_mask] + log_offset)
+ temp_data_to_equalize = temp_all_data[temp_mask_to_equalize]
+ temp_all_valid_data = temp_all_data[temp_all_valid_data_mask]
+
+ # a place to hold our weighted sum that represents the interpolated contributions
+ # of the histogram equalizations from the surrounding tiles
+ temp_sum = np.zeros_like(temp_data_to_equalize)
+
+ # how much weight were we unable to use because those tiles
+ # fell off the edge of the image?
+ unused_weight = np.zeros(temp_data_to_equalize.shape, dtype=tile_weights.dtype)
+
+ # loop through all the surrounding tiles and process their
+ # contributions to this tile
+ for weight_row in range(3):
+ for weight_col in range(3):
+ # figure out which adjacent tile we're processing (in
+ # overall tile coordinates instead of relative to our
+ # current tile)
+ calculated_row = num_row_tile - 1 + weight_row
+ calculated_col = num_col_tile - 1 + weight_col
+ tmp_tile_weights = tile_weights[
+ weight_row, weight_col][np.where(temp_mask_to_equalize)]
+
+ # if we're inside the tile array and the tile we're
+ # processing has a histogram equalization for us to
+ # use, process it
+ if ((calculated_row >= 0) and
+ (calculated_row < row_tiles) and
+ (calculated_col >= 0) and
+ (calculated_col < col_tiles) and (
+ all_bin_information[calculated_row][
+ calculated_col] is not None) and
+ (all_cumulative_dist_functions[calculated_row][
+ calculated_col] is not None)):
+
+ # equalize our current tile using the histogram
+ # equalization from the tile we're processing
+ temp_equalized_data = np.interp(
+ temp_all_valid_data, all_bin_information[calculated_row][calculated_col][:-1],
+ all_cumulative_dist_functions[calculated_row][
+ calculated_col])
+ temp_equalized_data = temp_equalized_data[np.where(
+ temp_mask_to_equalize[temp_all_valid_data_mask])]
+
+ # add the contribution for the tile we're
+ # processing to our weighted sum
+ temp_sum += temp_equalized_data * tmp_tile_weights
+
+ # if the tile we're processing doesn't exist, hang onto the weight we
+ # would have used for it so we can correct that later
+ else:
+ unused_weight -= tmp_tile_weights
+
+ # if we have unused weights, scale our values to correct for that
+ if unused_weight.any():
+ # TODO: if the mask masks everything out this will be a zero!
+ temp_sum /= unused_weight + 1
+
+ # now that we've calculated the weighted sum for this tile, set
+ # it in our data array
+ out[min_row:max_row, min_col:max_col][
+ temp_mask_to_equalize] = temp_sum
+ # TEMP, test without using weights
+ # data[min_row:max_row, min_col:max_col][temp_mask_to_equalize] = \
+ # np.interp(temp_data_to_equalize, all_bin_information[num_row_tile][num_col_tile][:-1],
+ # all_cumulative_dist_functions[num_row_tile][num_col_tile])
+
+
def _histogram_equalization_helper(valid_data, number_of_bins, clip_limit=None, slope_limit=None):
"""Calculate the simplest possible histogram equalization, using only valid data.
@@ -745,10 +748,8 @@ def _calculate_weights(tile_size):
# that has all 8 surrounding tiles available
# create our empty template tiles
template_tile = np.zeros((3, 3, tile_size, tile_size), dtype=np.float32)
- """
# TEMP FOR TESTING, create a weight tile that does no interpolation
- template_tile[1,1] = template_tile[1,1] + 1.0
- """
+ # template_tile[1,1] = template_tile[1,1] + 1.0
# for ease of calculation, figure out the index of the center pixel in a tile
# and how far that pixel is from the edge of the tile (in pixel units)
@@ -952,18 +953,19 @@ class SnowAge(GenericCompositor):
Product is based on method presented at the second
CSPP/IMAPP users' meeting at Eumetsat in Darmstadt on 14-16 April 2015
- # Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France
- # These Look-up Tables allow you to create the RGB snow product
- # for SUOMI-NPP VIIRS Imager according to the algorithm
- # presented at the second CSPP/IMAPP users' meeting at Eumetsat
- # in Darmstadt on 14-16 April 2015
- # The algorithm and the product are described in this
- # presentation :
- # http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf
- # For further information you may contact
- # Bernard Bellec at Bernard.Bellec@meteo.fr
- # or
- # Pascale Roquet at Pascale.Roquet@meteo.fr
+ Bernard Bellec snow Look-Up Tables V 1.0 (c) Meteo-France
+ These Look-up Tables allow you to create the RGB snow product
+ for SUOMI-NPP VIIRS Imager according to the algorithm
+ presented at the second CSPP/IMAPP users' meeting at Eumetsat
+ in Darmstadt on 14-16 April 2015
+ The algorithm and the product are described in this
+ presentation:
+ http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf
+ as well as in the paper http://dx.doi.org/10.1016/j.rse.2017.04.028
+ For further information you may contact
+ Bernard Bellec at Bernard.Bellec@meteo.fr
+ or
+ Pascale Roquet at Pascale.Roquet@meteo.fr
"""
def __call__(self, projectables, nonprojectables=None, **info):
@@ -972,11 +974,13 @@ def __call__(self, projectables, nonprojectables=None, **info):
The algorithm and the product are described in this
presentation :
http://www.ssec.wisc.edu/meetings/cspp/2015/Agenda%20PDF/Wednesday/Roquet_snow_product_cspp2015.pdf
+ as well as in the paper http://dx.doi.org/10.1016/j.rse.2017.04.028
For further information you may contact
Bernard Bellec at Bernard.Bellec@meteo.fr
or
Pascale Roquet at Pascale.Roquet@meteo.fr
+ The resulting RGB has the units attribute removed.
"""
if len(projectables) != 5:
raise ValueError("Expected 5 datasets, got %d" %
@@ -1005,4 +1009,6 @@ def __call__(self, projectables, nonprojectables=None, **info):
ch2.attrs = info
ch3.attrs = info
- return super(SnowAge, self).__call__([ch1, ch2, ch3], **info)
+ res = super(SnowAge, self).__call__([ch1, ch2, ch3], **info)
+ res.attrs.pop("units", None)
+ return res
diff --git a/satpy/dataset/__init__.py b/satpy/dataset/__init__.py
index a42c1fa99a..bf6e372600 100644
--- a/satpy/dataset/__init__.py
+++ b/satpy/dataset/__init__.py
@@ -17,7 +17,7 @@
# satpy. If not, see .
"""Classes and functions related to data identification and querying."""
+from .anc_vars import dataset_walker, replace_anc # noqa
+from .data_dict import DatasetDict, get_key # noqa
from .dataid import DataID, DataQuery, ModifierTuple, create_filtered_query # noqa
-from .data_dict import get_key, DatasetDict # noqa
from .metadata import combine_metadata # noqa
-from .anc_vars import dataset_walker, replace_anc # noqa
\ No newline at end of file
diff --git a/satpy/dataset/data_dict.py b/satpy/dataset/data_dict.py
index d0961f56a1..eb9d8b9662 100644
--- a/satpy/dataset/data_dict.py
+++ b/satpy/dataset/data_dict.py
@@ -18,14 +18,13 @@
"""Classes and functions related to a dictionary with DataID keys."""
import numpy as np
+
from .dataid import DataID, create_filtered_query, minimal_default_keys_config
class TooManyResults(KeyError):
"""Special exception when one key maps to multiple items in the container."""
- pass
-
def get_best_dataset_key(key, choices):
"""Choose the "best" `DataID` from `choices` based on `key`.
@@ -112,14 +111,14 @@ def get_key(key, key_container, num_results=1, best=True, query=None,
if num_results == 1 and not res:
raise KeyError("No dataset matching '{}' found".format(str(key)))
- elif num_results == 1 and len(res) != 1:
+ if num_results == 1 and len(res) != 1:
raise TooManyResults("No unique dataset matching {}".format(str(key)))
- elif num_results == 1:
+ if num_results == 1:
return res[0]
- elif num_results == 0:
+ if num_results == 0:
return res
- else:
- return res[:num_results]
+
+ return res[:num_results]
class DatasetDict(dict):
@@ -182,52 +181,59 @@ def get(self, key, default=None):
def __setitem__(self, key, value):
"""Support assigning 'Dataset' objects or dictionaries of metadata."""
- value_dict = value
if hasattr(value, 'attrs'):
# xarray.DataArray objects
- value_dict = value.attrs
+ value_info = value.attrs
+ else:
+ value_info = value
# use value information to make a more complete DataID
if not isinstance(key, DataID):
- if not isinstance(value_dict, dict):
- raise ValueError("Key must be a DataID when value is not an xarray DataArray or dict")
- old_key = key
- try:
- key = self.get_key(key)
- except KeyError:
- if isinstance(old_key, str):
- new_name = old_key
- else:
- new_name = value_dict.get("name")
- # this is a new key and it's not a full DataID tuple
- if new_name is None and value_dict.get('wavelength') is None:
- raise ValueError("One of 'name' or 'wavelength' attrs "
- "values should be set.")
- try:
- id_keys = value_dict['_satpy_id'].id_keys
- except KeyError:
- try:
- id_keys = value_dict['_satpy_id_keys']
- except KeyError:
- id_keys = minimal_default_keys_config
- value_dict['name'] = new_name
- key = DataID(id_keys, **value_dict)
- if hasattr(value, 'attrs') and 'name' not in value.attrs:
- value.attrs['name'] = new_name
+ key = self._create_dataid_key(key, value_info)
# update the 'value' with the information contained in the key
try:
new_info = key.to_dict()
except AttributeError:
new_info = key
- if isinstance(value_dict, dict):
- value_dict.update(new_info)
-
- if hasattr(value, 'attrs'):
+ if isinstance(value_info, dict):
+ value_info.update(new_info)
if isinstance(key, DataID):
- value.attrs['_satpy_id'] = key
+ value_info['_satpy_id'] = key
return super(DatasetDict, self).__setitem__(key, value)
+ def _create_dataid_key(self, key, value_info):
+ """Create a DataID key from dictionary."""
+ if not isinstance(value_info, dict):
+ raise ValueError("Key must be a DataID when value is not an xarray DataArray or dict")
+ old_key = key
+ try:
+ key = self.get_key(key)
+ except KeyError:
+ if isinstance(old_key, str):
+ new_name = old_key
+ else:
+ new_name = value_info.get("name")
+ # this is a new key and it's not a full DataID tuple
+ if new_name is None and value_info.get('wavelength') is None:
+ raise ValueError("One of 'name' or 'wavelength' attrs "
+ "values should be set.")
+ id_keys = self._create_id_keys_from_dict(value_info)
+ value_info['name'] = new_name
+ key = DataID(id_keys, **value_info)
+ return key
+
+ def _create_id_keys_from_dict(self, value_info_dict):
+ """Create id_keys from dict."""
+ try:
+ id_keys = value_info_dict['_satpy_id'].id_keys
+ except KeyError:
+ try:
+ id_keys = value_info_dict['_satpy_id_keys']
+ except KeyError:
+ id_keys = minimal_default_keys_config
+ return id_keys
+
def contains(self, item):
"""Check contains when we know the *exact* DataID."""
return super(DatasetDict, self).__contains__(item)
diff --git a/satpy/dataset/dataid.py b/satpy/dataset/dataid.py
index a147e50133..20a6f8d18b 100644
--- a/satpy/dataset/dataid.py
+++ b/satpy/dataset/dataid.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2015-2020 Satpy developers
+# Copyright (c) 2015-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -23,7 +23,8 @@
from collections import namedtuple
from contextlib import suppress
from copy import copy, deepcopy
-from enum import IntEnum, Enum
+from enum import Enum, IntEnum
+from typing import NoReturn
import numpy as np
@@ -44,7 +45,14 @@ def get_keys_from_config(common_id_keys, config):
class ValueList(IntEnum):
- """A static value list."""
+ """A static value list.
+
+ This class is meant to be used for dynamically created Enums. Due to this
+ it should not be used as a normal Enum class or there may be some
+ unexpected behavior. For example, this class contains custom pickling and
+ unpickling handling that may break in subclasses.
+
+ """
@classmethod
def convert(cls, value):
@@ -54,6 +62,21 @@ def convert(cls, value):
except KeyError:
raise ValueError('{} invalid value for {}'.format(value, cls))
+ @classmethod
+ def _unpickle(cls, enum_name, enum_members, enum_member):
+ """Create dynamic class that was previously pickled.
+
+ See :meth:`__reduce_ex__` for implementation details.
+
+ """
+ enum_cls = cls(enum_name, enum_members)
+ return enum_cls[enum_member]
+
+ def __reduce_ex__(self, proto):
+ """Reduce the object for pickling."""
+ return (ValueList._unpickle,
+ (self.__class__.__name__, list(self.__class__.__members__.keys()), self.name))
+
def __eq__(self, other):
"""Check equality."""
return self.name == other
@@ -71,11 +94,7 @@ def __repr__(self):
return '<' + str(self) + '>'
-try:
- wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=('µm',))
-except TypeError: # python 3.6
- wlklass = namedtuple("WavelengthRange", "min central max unit")
- wlklass.__new__.__defaults__ = ('µm',)
+wlklass = namedtuple("WavelengthRange", "min central max unit", defaults=('µm',)) # type: ignore
class WavelengthRange(wlklass):
@@ -99,9 +118,9 @@ def __eq__(self, other):
"""
if other is None:
return False
- elif isinstance(other, numbers.Number):
+ if isinstance(other, numbers.Number):
return other in self
- elif isinstance(other, (tuple, list)) and len(other) == 3:
+ if isinstance(other, (tuple, list)) and len(other) == 3:
return self[:3] == other
return super().__eq__(other)
@@ -133,7 +152,7 @@ def __contains__(self, other):
"""Check if this range contains *other*."""
if other is None:
return False
- elif isinstance(other, numbers.Number):
+ if isinstance(other, numbers.Number):
return self.min <= other <= self.max
with suppress(AttributeError):
if self.unit != other.unit:
@@ -200,7 +219,7 @@ def convert(cls, modifiers):
"""Convert `modifiers` to this type if possible."""
if modifiers is None:
return None
- elif not isinstance(modifiers, (cls, tuple, list)):
+ if not isinstance(modifiers, (cls, tuple, list)):
raise TypeError("'DataID' modifiers must be a tuple or None, "
"not {}".format(type(modifiers)))
return cls(modifiers)
@@ -247,7 +266,6 @@ def __hash__(self):
},
}
-
#: Default ID keys for coordinate DataArrays.
default_co_keys_config = {'name': {
'required': True,
@@ -311,23 +329,17 @@ def convert_dict(self, keyvals):
if not keyvals:
return curated
for key, val in self._id_keys.items():
- if val is not None:
- if key in keyvals or val.get('default') is not None or val.get('required'):
- curated_val = keyvals.get(key, val.get('default'))
- if 'required' in val and curated_val is None:
- raise ValueError('Required field {} missing.'.format(key))
- if 'type' in val:
- curated[key] = val['type'].convert(curated_val)
- elif curated_val is not None:
- curated[key] = curated_val
- else:
- try:
- curated_val = keyvals[key]
- except KeyError:
- pass
- else:
- if curated_val is not None:
- curated[key] = curated_val
+ if val is None:
+ val = {}
+ if key in keyvals or val.get('default') is not None or val.get('required'):
+ curated_val = keyvals.get(key, val.get('default'))
+ if 'required' in val and curated_val is None:
+ raise ValueError('Required field {} missing.'.format(key))
+ if 'type' in val:
+ curated[key] = val['type'].convert(curated_val)
+ elif curated_val is not None:
+ curated[key] = curated_val
+
return curated
@classmethod
@@ -428,7 +440,7 @@ def __hash__(self):
self._hash = hash(tuple(sorted(self.items())))
return self._hash
- def _immutable(self, *args, **kws):
+ def _immutable(self, *args, **kws) -> NoReturn:
"""Raise and error."""
raise TypeError('Cannot change a DataID')
@@ -444,34 +456,20 @@ def __lt__(self, other):
elif key in self:
val = self[key]
list_self.append(val)
- if isinstance(val, numbers.Number):
- list_other.append(0)
- elif isinstance(val, str):
- list_other.append('')
- elif isinstance(val, tuple):
- list_other.append(tuple())
- else:
- raise NotImplementedError("Don't know how to generalize " + str(type(val)))
+ list_other.append(_generalize_value_for_comparison(val))
elif key in other:
val = other[key]
list_other.append(val)
- if isinstance(val, numbers.Number):
- list_self.append(0)
- elif isinstance(val, str):
- list_self.append('')
- elif isinstance(val, tuple):
- list_self.append(tuple())
- else:
- raise NotImplementedError("Don't know how to generalize " + str(type(val)))
+ list_self.append(_generalize_value_for_comparison(val))
return tuple(list_self) < tuple(list_other)
__setitem__ = _immutable
__delitem__ = _immutable
- pop = _immutable
+ pop = _immutable # type: ignore
popitem = _immutable
clear = _immutable
- update = _immutable
- setdefault = _immutable
+ update = _immutable # type: ignore
+ setdefault = _immutable # type: ignore
def _find_modifiers_key(self):
for key, val in self.items():
@@ -494,11 +492,23 @@ def is_modified(self):
return bool(self[key])
+def _generalize_value_for_comparison(val):
+ """Get a generalize value for comparisons."""
+ if isinstance(val, numbers.Number):
+ return 0
+ if isinstance(val, str):
+ return ""
+ if isinstance(val, tuple):
+ return tuple()
+
+ raise NotImplementedError("Don't know how to generalize " + str(type(val)))
+
+
class DataQuery:
"""The data query object.
A DataQuery can be used in Satpy to query for a Dataset. This way
- a fully qualified DataID can be found even if some of the DataID
+ a fully qualified DataID can be found even if some DataID
elements are unknown. In this case a `*` signifies something that is
unknown or not applicable to the requested Dataset.
"""
@@ -663,41 +673,52 @@ def sort_dataids(self, dataids):
sorted_dataids.append(dataid)
distance = 0
for key in keys:
+ if distance == np.inf:
+ break
val = self._dict.get(key, '*')
if val == '*':
- try:
- # for enums
- distance += dataid.get(key).value
- except AttributeError:
- if isinstance(dataid.get(key), numbers.Number):
- distance += dataid.get(key)
- elif isinstance(dataid.get(key), tuple):
- distance += len(dataid.get(key))
+ distance = self._add_absolute_distance(dataid, key, distance)
else:
try:
dataid_val = dataid[key]
except KeyError:
distance += big_distance
continue
- try:
- distance += dataid_val.distance(val)
- except AttributeError:
- if not isinstance(val, list):
- val = [val]
- if dataid_val not in val:
- distance = np.inf
- break
- elif isinstance(dataid_val, numbers.Number):
- # so as to get the highest resolution first
- # FIXME: this ought to be clarified, not sure that
- # higher resolution is preferable is all cases.
- # Moreover this might break with other numerical
- # values.
- distance += dataid_val
+ distance = self._add_distance_from_query(dataid_val, val, distance)
distances.append(distance)
distances, dataids = zip(*sorted(zip(distances, sorted_dataids)))
return dataids, distances
+ @staticmethod
+ def _add_absolute_distance(dataid, key, distance):
+ try:
+ # for enums
+ distance += dataid.get(key).value
+ except AttributeError:
+ if isinstance(dataid.get(key), numbers.Number):
+ distance += dataid.get(key)
+ elif isinstance(dataid.get(key), tuple):
+ distance += len(dataid.get(key))
+ return distance
+
+ @staticmethod
+ def _add_distance_from_query(dataid_val, requested_val, distance):
+ try:
+ distance += dataid_val.distance(requested_val)
+ except AttributeError:
+ if not isinstance(requested_val, list):
+ requested_val = [requested_val]
+ if dataid_val not in requested_val:
+ distance = np.inf
+ elif isinstance(dataid_val, numbers.Number):
+ # so as to get the highest resolution first
+ # FIXME: this ought to be clarified, not sure that
+ # higher resolution is preferable in all cases.
+ # Moreover this might break with other numerical
+ # values.
+ distance += dataid_val
+ return distance
+
def create_less_modified_query(self):
"""Create a query with one less modifier."""
new_dict = self.to_dict()
diff --git a/satpy/dataset/metadata.py b/satpy/dataset/metadata.py
index 4093447935..4ba3cde1a1 100644
--- a/satpy/dataset/metadata.py
+++ b/satpy/dataset/metadata.py
@@ -19,8 +19,9 @@
from collections.abc import Collection
from datetime import datetime
-from functools import reduce, partial
-from operator import is_, eq
+from functools import partial, reduce
+from operator import eq, is_
+
import numpy as np
from satpy.writers.utils import flatten_dict
@@ -113,7 +114,7 @@ def _are_values_combinable(values):
def _all_non_dicts_equal(values):
if _contain_arrays(values):
return _all_arrays_equal(values)
- elif _contain_collections_of_arrays(values):
+ if _contain_collections_of_arrays(values):
# in the real world, the `ancillary_variables` attribute may be
# List[xarray.DataArray], this means our values are now
# List[List[xarray.DataArray]].
diff --git a/satpy/demo/__init__.py b/satpy/demo/__init__.py
index 045e07c77a..b12c9e285b 100644
--- a/satpy/demo/__init__.py
+++ b/satpy/demo/__init__.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2019 Satpy developers
+# Copyright (c) 2019-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -50,133 +50,9 @@
"""
-import os
-import logging
-from satpy import config
-
-LOG = logging.getLogger(__name__)
-
-
-def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False):
- """Get GOES-16 ABI (CONUS sector) data from 2019-03-14 00:00Z.
-
- Args:
- base_dir (str): Base directory for downloaded files.
- method (str): Force download method for the data if not already cached.
- Allowed options are: 'gcsfs'. Default of ``None`` will
- choose the best method based on environment settings.
- force (bool): Force re-download of data regardless of its existence on
- the local system. Warning: May delete non-demo files stored in
- download directory.
-
- Total size: ~110MB
-
- """
- base_dir = base_dir or config.get('demo_data_dir', '.')
- if method is None:
- method = 'gcsfs'
- if method not in ['gcsfs']:
- raise NotImplementedError("Demo data download method '{}' not "
- "implemented yet.".format(method))
-
- from ._google_cloud_platform import get_bucket_files
- patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc']
- subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone')
- os.makedirs(subdir, exist_ok=True)
- filenames = get_bucket_files(patterns, subdir, force=force)
- assert len(filenames) == 16, "Not all files could be downloaded"
- return filenames
-
-
-def get_hurricane_florence_abi(base_dir=None, method=None, force=False,
- channels=None, num_frames=10):
- """Get GOES-16 ABI (Meso sector) data from 2018-09-11 13:00Z to 17:00Z.
-
- Args:
- base_dir (str): Base directory for downloaded files.
- method (str): Force download method for the data if not already cached.
- Allowed options are: 'gcsfs'. Default of ``None`` will
- choose the best method based on environment settings.
- force (bool): Force re-download of data regardless of its existence on
- the local system. Warning: May delete non-demo files stored in
- download directory.
- channels (list): Channels to include in download. Defaults to all
- 16 channels.
- num_frames (int or slice): Number of frames to download. Maximum
- 240 frames. Default 10 frames.
-
- Size per frame (all channels): ~15MB
-
- Total size (default 10 frames, all channels): ~124MB
-
- Total size (240 frames, all channels): ~3.5GB
-
- """
- base_dir = base_dir or config.get('demo_data_dir', '.')
- if channels is None:
- channels = range(1, 17)
- if method is None:
- method = 'gcsfs'
- if method not in ['gcsfs']:
- raise NotImplementedError("Demo data download method '{}' not "
- "implemented yet.".format(method))
- if isinstance(num_frames, (int, float)):
- frame_slice = slice(0, num_frames)
- else:
- frame_slice = num_frames
-
- from ._google_cloud_platform import get_bucket_files
-
- patterns = []
- for channel in channels:
- # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/'
- # '*C{:02d}*s20182541[3456]*.nc'.format(channel)]
- patterns += [(
- 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel),
- 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel),
- 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel),
- 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel),
- )]
- subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b')
- os.makedirs(subdir, exist_ok=True)
- filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice)
-
- actual_slice = frame_slice.indices(240) # 240 max frames
- num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2])
- assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded"
- return filenames
-
-
-def download_typhoon_surigae_ahi(base_dir=None,
- channels=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16),
- segments=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)):
- """Download Himawari 8 data.
-
- This scene shows the Typhoon Surigae.
- """
- import s3fs
- base_dir = base_dir or config.get('demo_data_dir', '.')
- channel_resolution = {1: 10,
- 2: 10,
- 3: 5,
- 4: 10}
- data_files = []
- for channel in channels:
- for segment in segments:
- resolution = channel_resolution.get(channel, 20)
- data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2")
-
- subdir = os.path.join(base_dir, 'ahi_hsd', '20210417_0500_random')
- os.makedirs(subdir, exist_ok=True)
- fs = s3fs.S3FileSystem(anon=True)
-
- result = []
- for filename in data_files:
- destination_filename = os.path.join(subdir, filename)
- result.append(destination_filename)
- if os.path.exists(destination_filename):
- continue
- to_get = 'noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/' + filename
- fs.get_file(to_get, destination_filename)
-
- return result
+from .abi_l1b import get_hurricane_florence_abi # noqa: F401
+from .abi_l1b import get_us_midlatitude_cyclone_abi # noqa: F401
+from .ahi_hsd import download_typhoon_surigae_ahi # noqa: F401
+from .fci import download_fci_test_data # noqa: F401
+from .seviri_hrit import download_seviri_hrit_20180228_1500 # noqa: F401
+from .viirs_sdr import get_viirs_sdr_20170128_1229 # noqa: F401
diff --git a/satpy/demo/_google_cloud_platform.py b/satpy/demo/_google_cloud_platform.py
index 7e3bd5d5c6..c1b7016388 100644
--- a/satpy/demo/_google_cloud_platform.py
+++ b/satpy/demo/_google_cloud_platform.py
@@ -16,15 +16,10 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
-import os
import logging
-
-try:
- from urllib.request import urlopen
- from urllib.error import URLError
-except ImportError:
- # python 2
- from urllib2 import urlopen, URLError
+import os
+from urllib.error import URLError
+from urllib.request import urlopen
try:
import gcsfs
@@ -83,20 +78,25 @@ def get_bucket_files(glob_pattern, base_dir, force=False, pattern_slice=None):
else:
# flat list of results
glob_results = [fn for pat in gp for fn in fs.glob(pat)]
-
- for fn in glob_results[pattern_slice]:
- ondisk_fn = os.path.basename(fn)
- ondisk_pathname = os.path.join(base_dir, ondisk_fn)
- filenames.append(ondisk_pathname)
-
- if force and os.path.isfile(ondisk_pathname):
- os.remove(ondisk_pathname)
- elif os.path.isfile(ondisk_pathname):
- LOG.info("Found existing: {}".format(ondisk_pathname))
- continue
- LOG.info("Downloading: {}".format(ondisk_pathname))
- fs.get('gs://' + fn, ondisk_pathname)
+ filenames.extend(_download_gcs_files(glob_results[pattern_slice], fs, base_dir, force))
if not filenames:
raise OSError("No files could be found or downloaded.")
return filenames
+
+
+def _download_gcs_files(globbed_files, fs, base_dir, force):
+ filenames = []
+ for fn in globbed_files:
+ ondisk_fn = os.path.basename(fn)
+ ondisk_pathname = os.path.join(base_dir, ondisk_fn)
+ filenames.append(ondisk_pathname)
+
+ if force and os.path.isfile(ondisk_pathname):
+ os.remove(ondisk_pathname)
+ elif os.path.isfile(ondisk_pathname):
+ LOG.info("Found existing: {}".format(ondisk_pathname))
+ continue
+ LOG.info("Downloading: {}".format(ondisk_pathname))
+ fs.get('gs://' + fn, ondisk_pathname)
+ return filenames
diff --git a/satpy/demo/abi_l1b.py b/satpy/demo/abi_l1b.py
new file mode 100644
index 0000000000..e223238767
--- /dev/null
+++ b/satpy/demo/abi_l1b.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Demo data download helper functions for ABI L1b data."""
+import os
+
+from satpy import config
+
+
+def get_us_midlatitude_cyclone_abi(base_dir=None, method=None, force=False):
+ """Get GOES-16 ABI (CONUS sector) data from 2019-03-14 00:00Z.
+
+ Args:
+ base_dir (str): Base directory for downloaded files.
+ method (str): Force download method for the data if not already cached.
+ Allowed options are: 'gcsfs'. Default of ``None`` will
+ choose the best method based on environment settings.
+ force (bool): Force re-download of data regardless of its existence on
+ the local system. Warning: May delete non-demo files stored in
+ download directory.
+
+ Total size: ~110MB
+
+ """
+ base_dir = base_dir or config.get('demo_data_dir', '.')
+ if method is None:
+ method = 'gcsfs'
+ if method not in ['gcsfs']:
+ raise NotImplementedError("Demo data download method '{}' not "
+ "implemented yet.".format(method))
+
+ from ._google_cloud_platform import get_bucket_files
+ patterns = ['gs://gcp-public-data-goes-16/ABI-L1b-RadC/2019/073/00/*s20190730002*.nc']
+ subdir = os.path.join(base_dir, 'abi_l1b', '20190314_us_midlatitude_cyclone')
+ os.makedirs(subdir, exist_ok=True)
+ filenames = get_bucket_files(patterns, subdir, force=force)
+ assert len(filenames) == 16, "Not all files could be downloaded"
+ return filenames
+
+
+def get_hurricane_florence_abi(base_dir=None, method=None, force=False,
+ channels=None, num_frames=10):
+ """Get GOES-16 ABI (Meso sector) data from 2018-09-11 13:00Z to 17:00Z.
+
+ Args:
+ base_dir (str): Base directory for downloaded files.
+ method (str): Force download method for the data if not already cached.
+ Allowed options are: 'gcsfs'. Default of ``None`` will
+ choose the best method based on environment settings.
+ force (bool): Force re-download of data regardless of its existence on
+ the local system. Warning: May delete non-demo files stored in
+ download directory.
+ channels (list): Channels to include in download. Defaults to all
+ 16 channels.
+ num_frames (int or slice): Number of frames to download. Maximum
+ 240 frames. Default 10 frames.
+
+ Size per frame (all channels): ~15MB
+
+ Total size (default 10 frames, all channels): ~124MB
+
+ Total size (240 frames, all channels): ~3.5GB
+
+ """
+ base_dir = base_dir or config.get('demo_data_dir', '.')
+ if channels is None:
+ channels = range(1, 17)
+ if method is None:
+ method = 'gcsfs'
+ if method not in ['gcsfs']:
+ raise NotImplementedError("Demo data download method '{}' not "
+ "implemented yet.".format(method))
+ if isinstance(num_frames, (int, float)):
+ frame_slice = slice(0, num_frames)
+ else:
+ frame_slice = num_frames
+
+ from ._google_cloud_platform import get_bucket_files
+
+ patterns = []
+ for channel in channels:
+ # patterns += ['gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/1[3456]/'
+ # '*C{:02d}*s20182541[3456]*.nc'.format(channel)]
+ patterns += [(
+ 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/13/*RadM1*C{:02d}*s201825413*.nc'.format(channel),
+ 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/14/*RadM1*C{:02d}*s201825414*.nc'.format(channel),
+ 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/15/*RadM1*C{:02d}*s201825415*.nc'.format(channel),
+ 'gs://gcp-public-data-goes-16/ABI-L1b-RadM/2018/254/16/*RadM1*C{:02d}*s201825416*.nc'.format(channel),
+ )]
+ subdir = os.path.join(base_dir, 'abi_l1b', '20180911_hurricane_florence_abi_l1b')
+ os.makedirs(subdir, exist_ok=True)
+ filenames = get_bucket_files(patterns, subdir, force=force, pattern_slice=frame_slice)
+
+ actual_slice = frame_slice.indices(240) # 240 max frames
+ num_frames = int((actual_slice[1] - actual_slice[0]) / actual_slice[2])
+ assert len(filenames) == len(channels) * num_frames, "Not all files could be downloaded"
+ return filenames
diff --git a/satpy/demo/ahi_hsd.py b/satpy/demo/ahi_hsd.py
new file mode 100644
index 0000000000..784d90719f
--- /dev/null
+++ b/satpy/demo/ahi_hsd.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Demo data download helper functions for AHI HSD data."""
+import os
+
+from satpy import config
+
+
+def download_typhoon_surigae_ahi(base_dir=None,
+ channels=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16),
+ segments=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)):
+ """Download Himawari 8 data.
+
+ This scene shows Typhoon Surigae.
+ """
+ import s3fs
+ base_dir = base_dir or config.get('demo_data_dir', '.')
+ channel_resolution = {1: 10,
+ 2: 10,
+ 3: 5,
+ 4: 10}
+ data_files = []
+ for channel in channels:
+ resolution = channel_resolution.get(channel, 20)
+ for segment in segments:
+ data_files.append(f"HS_H08_20210417_0500_B{channel:02d}_FLDK_R{resolution:02d}_S{segment:02d}10.DAT.bz2")
+
+ subdir = os.path.join(base_dir, 'ahi_hsd', '20210417_0500_typhoon_surigae')
+ os.makedirs(subdir, exist_ok=True)
+ fs = s3fs.S3FileSystem(anon=True)
+
+ result = []
+ for filename in data_files:
+ destination_filename = os.path.join(subdir, filename)
+ result.append(destination_filename)
+ if os.path.exists(destination_filename):
+ continue
+ to_get = 'noaa-himawari8/AHI-L1b-FLDK/2021/04/17/0500/' + filename
+ fs.get_file(to_get, destination_filename)
+
+ return result
diff --git a/satpy/demo/fci.py b/satpy/demo/fci.py
new file mode 100644
index 0000000000..7c4160b203
--- /dev/null
+++ b/satpy/demo/fci.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Demo FCI data download."""
+
+import pathlib
+import tarfile
+import tempfile
+
+from satpy import config
+
+from . import utils
+
+_fci_uncompressed_nominal = (
+ "https://sftp.eumetsat.int/public/folder/UsCVknVOOkSyCdgpMimJNQ/"
+ "User-Materials/Test-Data/MTG/MTG_FCI_L1C_Enhanced-NonN_TD-272_May2020/"
+ "FCI_1C_UNCOMPRESSED_NOMINAL.tar.gz")
+
+
+def download_fci_test_data(base_dir=None):
+ """Download FCI test data.
+
+ Download the nominal FCI test data from May 2020.
+ """
+ subdir = get_fci_test_data_dir(base_dir=base_dir)
+ with tempfile.TemporaryDirectory() as td:
+ nm = pathlib.Path(td) / "fci-test-data.tar.gz"
+ utils.download_url(_fci_uncompressed_nominal, nm)
+ return _unpack_tarfile_to(nm, subdir)
+
+
+def get_fci_test_data_dir(base_dir=None):
+ """Get directory for FCI test data."""
+ base_dir = base_dir or config.get("demo_data_dir", ".")
+ return pathlib.Path(base_dir) / "fci" / "test_data"
+
+
+def _unpack_tarfile_to(filename, subdir):
+ """Unpack content of tarfile in filename to subdir."""
+ with tarfile.open(filename, mode="r:gz") as tf:
+ contents = tf.getnames()
+ tf.extractall(path=subdir)
+ return contents
diff --git a/satpy/demo/seviri_hrit.py b/satpy/demo/seviri_hrit.py
new file mode 100644
index 0000000000..3bf87ce2ea
--- /dev/null
+++ b/satpy/demo/seviri_hrit.py
@@ -0,0 +1,87 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Demo data download for SEVIRI HRIT files."""
+
+import logging
+import os.path
+
+from satpy import config
+from satpy.demo.utils import download_url
+
+logger = logging.getLogger(__name__)
+
+ZENODO_BASE_URL = "https://zenodo.org/api/files/dcc5ab29-d8a3-4fb5-ab2b-adc405d18c23/"
+FILENAME = "H-000-MSG4__-MSG4________-{channel:_<9s}-{segment:_<9s}-201802281500-__"
+
+
+def download_seviri_hrit_20180228_1500(base_dir=None, subset=None):
+ """Download the SEVIRI HRIT files for 2018-02-28T15:00.
+
+ *subset* is a dictionary with the channels as keys and granules to download
+ as values, e.g.::
+
+ {"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None}
+
+ """
+ files = generate_subset_of_filenames(subset)
+
+ base_dir = base_dir or config.get("demo_data_dir", ".")
+ subdir = os.path.join(base_dir, "seviri_hrit", "20180228_1500")
+ os.makedirs(subdir, exist_ok=True)
+ targets = []
+ for the_file in files:
+ target = os.path.join(subdir, the_file)
+ targets.append(target)
+ if os.path.isfile(target):
+ continue
+ download_url(ZENODO_BASE_URL + the_file, target)
+ return targets
+
+
+def generate_subset_of_filenames(subset=None, base_dir=""):
+ """Generate SEVIRI HRIT filenames."""
+ if subset is None:
+ subset = _create_full_set()
+ pattern = os.path.join(base_dir, FILENAME)
+ files = []
+ for channel, segments in subset.items():
+ new_files = _generate_filenames(pattern, channel, segments)
+ files.extend(new_files)
+ return files
+
+
+def _generate_filenames(pattern, channel, segments):
+ """Generate the filenames for *channel* and *segments*."""
+ if channel in ["PRO", "EPI"]:
+ new_files = [pattern.format(channel="", segment=channel)]
+ else:
+ new_files = (pattern.format(channel=channel, segment=f"{segment:06d}") for segment in segments)
+ return new_files
+
+
+def _create_full_set():
+ """Create the full set dictionary."""
+ subset = {"HRV": range(1, 25),
+ "EPI": None,
+ "PRO": None}
+ channels = ["IR_016", "IR_039", "IR_087", "IR_097", "IR_108", "IR_120", "IR_134",
+ "VIS006", "VIS008",
+ "WV_062", "WV_073"]
+ for channel in channels:
+ subset[channel] = range(1, 9)
+ return subset
diff --git a/satpy/demo/utils.py b/satpy/demo/utils.py
new file mode 100644
index 0000000000..0fd1d1d1a7
--- /dev/null
+++ b/satpy/demo/utils.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Utilities for demo data download."""
+
+import requests
+
+
+def download_url(source, target):
+ """Download a url in stream mode."""
+ with requests.get(source, stream=True) as r:
+ r.raise_for_status()
+ with open(target, "wb") as f:
+ for chunk in r.iter_content(chunk_size=8192):
+ f.write(chunk)
diff --git a/satpy/demo/viirs_sdr.py b/satpy/demo/viirs_sdr.py
new file mode 100644
index 0000000000..a22bb9c652
--- /dev/null
+++ b/satpy/demo/viirs_sdr.py
@@ -0,0 +1,422 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Demo data download for VIIRS SDR HDF5 files."""
+
+import logging
+import os
+
+from satpy import config
+from satpy.demo.utils import download_url
+
+logger = logging.getLogger(__name__)
+ZENODO_BASE_URL = "https://zenodo.org/api/files/6aae2ac7-5e8e-4a42-96d0-393ad6a620ea/"
+GDNBO_URLS = [
+ "GDNBO_npp_d20170128_t1230144_e1231386_b27228_c20170128123806232923_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1231398_e1233040_b27228_c20170128123931141440_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1233052_e1234294_b27228_c20170128124058766619_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1234306_e1235548_b27228_c20170128124307612305_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1235560_e1237184_b27228_c20170128124429250510_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1237197_e1238439_b27228_c20170128124604860922_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1238451_e1240093_b27228_c20170128124804684300_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1240105_e1241347_b27228_c20170128124931597063_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1241359_e1243001_b27228_c20170128125104219695_cspp_dev.h5",
+ "GDNBO_npp_d20170128_t1243013_e1244238_b27228_c20170128125239512908_cspp_dev.h5",
+]
+GITCO_URLS = [
+ "GITCO_npp_d20170128_t1230144_e1231386_b27228_c20170128123806844060_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1231398_e1233040_b27228_c20170128123931757165_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1233052_e1234294_b27228_c20170128124059393347_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1234306_e1235548_b27228_c20170128124308254991_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1235560_e1237184_b27228_c20170128124429909006_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1237197_e1238439_b27228_c20170128124605535586_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1238451_e1240093_b27228_c20170128124805310389_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1240105_e1241347_b27228_c20170128124932240716_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1241359_e1243001_b27228_c20170128125104876016_cspp_dev.h5",
+ "GITCO_npp_d20170128_t1243013_e1244238_b27228_c20170128125240141821_cspp_dev.h5",
+]
+GMTCO_URLS = [
+ "GMTCO_npp_d20170128_t1230144_e1231386_b27228_c20170128123807370375_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1231398_e1233040_b27228_c20170128123932277110_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1233052_e1234294_b27228_c20170128124059920205_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1234306_e1235548_b27228_c20170128124308776985_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1235560_e1237184_b27228_c20170128124430441905_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1237197_e1238439_b27228_c20170128124606068231_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1238451_e1240093_b27228_c20170128124805827641_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1240105_e1241347_b27228_c20170128124932760643_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1241359_e1243001_b27228_c20170128125105397710_cspp_dev.h5",
+ "GMTCO_npp_d20170128_t1243013_e1244238_b27228_c20170128125240670869_cspp_dev.h5",
+]
+SVDNB_FILES = [
+ "SVDNB_npp_d20170128_t1230144_e1231386_b27228_c20170128123806052274_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1231398_e1233040_b27228_c20170128123930950786_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1233052_e1234294_b27228_c20170128124058573341_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1234306_e1235548_b27228_c20170128124307412059_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1235560_e1237184_b27228_c20170128124429036820_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1237197_e1238439_b27228_c20170128124604651619_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1238451_e1240093_b27228_c20170128124804485537_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1240105_e1241347_b27228_c20170128124931392535_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1241359_e1243001_b27228_c20170128125104024324_cspp_dev.h5",
+ "SVDNB_npp_d20170128_t1243013_e1244238_b27228_c20170128125239325940_cspp_dev.h5",
+]
+SVI01_FILES = [
+ "SVI01_npp_d20170128_t1230144_e1231386_b27228_c20170128123807637119_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1231398_e1233040_b27228_c20170128123932561605_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1233052_e1234294_b27228_c20170128124100227434_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1234306_e1235548_b27228_c20170128124309038634_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1235560_e1237184_b27228_c20170128124430720302_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1237197_e1238439_b27228_c20170128124606429178_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1238451_e1240093_b27228_c20170128124806092384_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1240105_e1241347_b27228_c20170128124933022697_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1241359_e1243001_b27228_c20170128125105683986_cspp_dev.h5",
+ "SVI01_npp_d20170128_t1243013_e1244238_b27228_c20170128125240927888_cspp_dev.h5",
+]
+SVI02_FILES = [
+ "SVI02_npp_d20170128_t1230144_e1231386_b27228_c20170128123807711244_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1231398_e1233040_b27228_c20170128123932632807_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1233052_e1234294_b27228_c20170128124100316117_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1234306_e1235548_b27228_c20170128124309108964_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1235560_e1237184_b27228_c20170128124430789039_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1237197_e1238439_b27228_c20170128124606564398_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1238451_e1240093_b27228_c20170128124806162998_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1240105_e1241347_b27228_c20170128124933090354_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1241359_e1243001_b27228_c20170128125105758438_cspp_dev.h5",
+ "SVI02_npp_d20170128_t1243013_e1244238_b27228_c20170128125240934475_cspp_dev.h5",
+]
+SVI03_FILES = [
+ "SVI03_npp_d20170128_t1230144_e1231386_b27228_c20170128123807790854_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1231398_e1233040_b27228_c20170128123932703535_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1233052_e1234294_b27228_c20170128124100406626_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1234306_e1235548_b27228_c20170128124309179885_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1235560_e1237184_b27228_c20170128124430858868_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1237197_e1238439_b27228_c20170128124606750872_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1238451_e1240093_b27228_c20170128124806231759_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1240105_e1241347_b27228_c20170128124933157871_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1241359_e1243001_b27228_c20170128125105832479_cspp_dev.h5",
+ "SVI03_npp_d20170128_t1243013_e1244238_b27228_c20170128125240940464_cspp_dev.h5",
+]
+SVI04_FILES = [
+ "SVI04_npp_d20170128_t1230144_e1231386_b27228_c20170128123807879916_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1231398_e1233040_b27228_c20170128123932774251_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1233052_e1234294_b27228_c20170128124100502220_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1234306_e1235548_b27228_c20170128124309251788_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1235560_e1237184_b27228_c20170128124430928643_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1237197_e1238439_b27228_c20170128124606941637_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1238451_e1240093_b27228_c20170128124806300867_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1240105_e1241347_b27228_c20170128124933224276_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1241359_e1243001_b27228_c20170128125105908005_cspp_dev.h5",
+ "SVI04_npp_d20170128_t1243013_e1244238_b27228_c20170128125240946462_cspp_dev.h5",
+]
+SVI05_FILES = [
+ "SVI05_npp_d20170128_t1230144_e1231386_b27228_c20170128123807965352_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1231398_e1233040_b27228_c20170128123932843985_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1233052_e1234294_b27228_c20170128124100619023_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1234306_e1235548_b27228_c20170128124309321883_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1235560_e1237184_b27228_c20170128124430998015_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1237197_e1238439_b27228_c20170128124607124779_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1238451_e1240093_b27228_c20170128124806370721_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1240105_e1241347_b27228_c20170128124933292345_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1241359_e1243001_b27228_c20170128125105983240_cspp_dev.h5",
+ "SVI05_npp_d20170128_t1243013_e1244238_b27228_c20170128125241011931_cspp_dev.h5",
+]
+SVM01_FILES = [
+ "SVM01_npp_d20170128_t1230144_e1231386_b27228_c20170128123808056273_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1231398_e1233040_b27228_c20170128123932914817_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1233052_e1234294_b27228_c20170128124100687072_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1234306_e1235548_b27228_c20170128124309391583_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1235560_e1237184_b27228_c20170128124431068152_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1237197_e1238439_b27228_c20170128124607341439_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1238451_e1240093_b27228_c20170128124806439930_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1240105_e1241347_b27228_c20170128124933359550_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1241359_e1243001_b27228_c20170128125106057121_cspp_dev.h5",
+ "SVM01_npp_d20170128_t1243013_e1244238_b27228_c20170128125241079274_cspp_dev.h5",
+]
+SVM02_FILES = [
+ "SVM02_npp_d20170128_t1230144_e1231386_b27228_c20170128123808083056_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1231398_e1233040_b27228_c20170128123932936791_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1233052_e1234294_b27228_c20170128124100708303_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1234306_e1235548_b27228_c20170128124309411322_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1235560_e1237184_b27228_c20170128124431089436_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1237197_e1238439_b27228_c20170128124607386792_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1238451_e1240093_b27228_c20170128124806460870_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1240105_e1241347_b27228_c20170128124933381053_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1241359_e1243001_b27228_c20170128125106080807_cspp_dev.h5",
+ "SVM02_npp_d20170128_t1243013_e1244238_b27228_c20170128125241085636_cspp_dev.h5",
+]
+SVM03_FILES = [
+ "SVM03_npp_d20170128_t1230144_e1231386_b27228_c20170128123808110482_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1231398_e1233040_b27228_c20170128123932959109_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1233052_e1234294_b27228_c20170128124100729893_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1234306_e1235548_b27228_c20170128124309431166_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1235560_e1237184_b27228_c20170128124431111317_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1237197_e1238439_b27228_c20170128124607452947_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1238451_e1240093_b27228_c20170128124806482313_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1240105_e1241347_b27228_c20170128124933402956_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1241359_e1243001_b27228_c20170128125106104416_cspp_dev.h5",
+ "SVM03_npp_d20170128_t1243013_e1244238_b27228_c20170128125241091894_cspp_dev.h5",
+]
+SVM04_FILES = [
+ "SVM04_npp_d20170128_t1230144_e1231386_b27228_c20170128123808144258_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1231398_e1233040_b27228_c20170128123932987116_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1233052_e1234294_b27228_c20170128124100757998_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1234306_e1235548_b27228_c20170128124309456779_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1235560_e1237184_b27228_c20170128124431139074_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1237197_e1238439_b27228_c20170128124607542297_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1238451_e1240093_b27228_c20170128124806582119_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1240105_e1241347_b27228_c20170128124933430115_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1241359_e1243001_b27228_c20170128125106135317_cspp_dev.h5",
+ "SVM04_npp_d20170128_t1243013_e1244238_b27228_c20170128125241097854_cspp_dev.h5",
+]
+SVM05_FILES = [
+ "SVM05_npp_d20170128_t1230144_e1231386_b27228_c20170128123808174909_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1231398_e1233040_b27228_c20170128123933013965_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1233052_e1234294_b27228_c20170128124100786454_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1234306_e1235548_b27228_c20170128124309482588_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1235560_e1237184_b27228_c20170128124431167292_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1237197_e1238439_b27228_c20170128124607571141_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1238451_e1240093_b27228_c20170128124806609136_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1240105_e1241347_b27228_c20170128124933456985_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1241359_e1243001_b27228_c20170128125106166701_cspp_dev.h5",
+ "SVM05_npp_d20170128_t1243013_e1244238_b27228_c20170128125241103776_cspp_dev.h5",
+]
+SVM06_FILES = [
+ "SVM06_npp_d20170128_t1230144_e1231386_b27228_c20170128123808209437_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1231398_e1233040_b27228_c20170128123933040415_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1233052_e1234294_b27228_c20170128124100814386_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1234306_e1235548_b27228_c20170128124309508530_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1235560_e1237184_b27228_c20170128124431195933_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1237197_e1238439_b27228_c20170128124607627637_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1238451_e1240093_b27228_c20170128124806636359_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1240105_e1241347_b27228_c20170128124933483996_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1241359_e1243001_b27228_c20170128125106198061_cspp_dev.h5",
+ "SVM06_npp_d20170128_t1243013_e1244238_b27228_c20170128125241109756_cspp_dev.h5",
+]
+SVM07_FILES = [
+ "SVM07_npp_d20170128_t1230144_e1231386_b27228_c20170128123808817507_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1231398_e1233040_b27228_c20170128123933681441_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1233052_e1234294_b27228_c20170128124101490225_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1234306_e1235548_b27228_c20170128124310169252_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1235560_e1237184_b27228_c20170128124431921741_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1237197_e1238439_b27228_c20170128124608449604_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1238451_e1240093_b27228_c20170128124807323479_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1240105_e1241347_b27228_c20170128124934114857_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1241359_e1243001_b27228_c20170128125106915897_cspp_dev.h5",
+ "SVM07_npp_d20170128_t1243013_e1244238_b27228_c20170128125241115831_cspp_dev.h5",
+]
+SVM08_FILES = [
+ "SVM08_npp_d20170128_t1230144_e1231386_b27228_c20170128123808263071_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1231398_e1233040_b27228_c20170128123933088148_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1233052_e1234294_b27228_c20170128124100871070_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1234306_e1235548_b27228_c20170128124309555838_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1235560_e1237184_b27228_c20170128124431248317_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1237197_e1238439_b27228_c20170128124607703167_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1238451_e1240093_b27228_c20170128124806684245_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1240105_e1241347_b27228_c20170128124933531899_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1241359_e1243001_b27228_c20170128125106322404_cspp_dev.h5",
+ "SVM08_npp_d20170128_t1243013_e1244238_b27228_c20170128125241141517_cspp_dev.h5",
+]
+SVM09_FILES = [
+ "SVM09_npp_d20170128_t1230144_e1231386_b27228_c20170128123808287273_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1231398_e1233040_b27228_c20170128123933108818_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1233052_e1234294_b27228_c20170128124100892937_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1234306_e1235548_b27228_c20170128124309576967_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1235560_e1237184_b27228_c20170128124431271226_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1237197_e1238439_b27228_c20170128124607724822_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1238451_e1240093_b27228_c20170128124806704840_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1240105_e1241347_b27228_c20170128124933552828_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1241359_e1243001_b27228_c20170128125106345774_cspp_dev.h5",
+ "SVM09_npp_d20170128_t1243013_e1244238_b27228_c20170128125241161505_cspp_dev.h5",
+]
+SVM10_FILES = [
+ "SVM10_npp_d20170128_t1230144_e1231386_b27228_c20170128123808310591_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1231398_e1233040_b27228_c20170128123933130017_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1233052_e1234294_b27228_c20170128124100914429_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1234306_e1235548_b27228_c20170128124309597409_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1235560_e1237184_b27228_c20170128124431293295_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1237197_e1238439_b27228_c20170128124607775262_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1238451_e1240093_b27228_c20170128124806725948_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1240105_e1241347_b27228_c20170128124933573645_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1241359_e1243001_b27228_c20170128125106368109_cspp_dev.h5",
+ "SVM10_npp_d20170128_t1243013_e1244238_b27228_c20170128125241167901_cspp_dev.h5",
+]
+SVM11_FILES = [
+ "SVM11_npp_d20170128_t1230144_e1231386_b27228_c20170128123808334604_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1231398_e1233040_b27228_c20170128123933151513_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1233052_e1234294_b27228_c20170128124100935872_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1234306_e1235548_b27228_c20170128124309618913_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1235560_e1237184_b27228_c20170128124431315343_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1237197_e1238439_b27228_c20170128124607795773_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1238451_e1240093_b27228_c20170128124806746702_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1240105_e1241347_b27228_c20170128124933594619_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1241359_e1243001_b27228_c20170128125106390787_cspp_dev.h5",
+ "SVM11_npp_d20170128_t1243013_e1244238_b27228_c20170128125241187089_cspp_dev.h5",
+]
+SVM12_FILES = [
+ "SVM12_npp_d20170128_t1230144_e1231386_b27228_c20170128123808354907_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1231398_e1233040_b27228_c20170128123933172698_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1233052_e1234294_b27228_c20170128124100958185_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1234306_e1235548_b27228_c20170128124309641720_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1235560_e1237184_b27228_c20170128124431337449_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1237197_e1238439_b27228_c20170128124607849336_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1238451_e1240093_b27228_c20170128124806767820_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1240105_e1241347_b27228_c20170128124933615858_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1241359_e1243001_b27228_c20170128125106413369_cspp_dev.h5",
+ "SVM12_npp_d20170128_t1243013_e1244238_b27228_c20170128125241193417_cspp_dev.h5",
+]
+SVM13_FILES = [
+ "SVM13_npp_d20170128_t1230144_e1231386_b27228_c20170128123808374740_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1231398_e1233040_b27228_c20170128123933194069_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1233052_e1234294_b27228_c20170128124100980119_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1234306_e1235548_b27228_c20170128124309664100_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1235560_e1237184_b27228_c20170128124431359731_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1237197_e1238439_b27228_c20170128124607874078_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1238451_e1240093_b27228_c20170128124806788761_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1240105_e1241347_b27228_c20170128124933637079_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1241359_e1243001_b27228_c20170128125106435940_cspp_dev.h5",
+ "SVM13_npp_d20170128_t1243013_e1244238_b27228_c20170128125241212475_cspp_dev.h5",
+]
+SVM14_FILES = [
+ "SVM14_npp_d20170128_t1230144_e1231386_b27228_c20170128123808406951_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1231398_e1233040_b27228_c20170128123933225740_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1233052_e1234294_b27228_c20170128124101014245_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1234306_e1235548_b27228_c20170128124309701221_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1235560_e1237184_b27228_c20170128124431396452_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1237197_e1238439_b27228_c20170128124607945197_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1238451_e1240093_b27228_c20170128124806821782_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1240105_e1241347_b27228_c20170128124933671536_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1241359_e1243001_b27228_c20170128125106472259_cspp_dev.h5",
+ "SVM14_npp_d20170128_t1243013_e1244238_b27228_c20170128125241244180_cspp_dev.h5",
+]
+SVM15_FILES = [
+ "SVM15_npp_d20170128_t1230144_e1231386_b27228_c20170128123808427359_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1231398_e1233040_b27228_c20170128123933246722_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1233052_e1234294_b27228_c20170128124101036439_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1234306_e1235548_b27228_c20170128124309725283_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1235560_e1237184_b27228_c20170128124431418392_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1237197_e1238439_b27228_c20170128124607965779_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1238451_e1240093_b27228_c20170128124806948533_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1240105_e1241347_b27228_c20170128124933693703_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1241359_e1243001_b27228_c20170128125106494806_cspp_dev.h5",
+ "SVM15_npp_d20170128_t1243013_e1244238_b27228_c20170128125241264993_cspp_dev.h5",
+]
+SVM16_FILES = [
+ "SVM16_npp_d20170128_t1230144_e1231386_b27228_c20170128123808447333_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1231398_e1233040_b27228_c20170128123933268965_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1233052_e1234294_b27228_c20170128124101058805_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1234306_e1235548_b27228_c20170128124309747830_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1235560_e1237184_b27228_c20170128124431440604_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1237197_e1238439_b27228_c20170128124608015196_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1238451_e1240093_b27228_c20170128124806970479_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1240105_e1241347_b27228_c20170128124933715705_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1241359_e1243001_b27228_c20170128125106518023_cspp_dev.h5",
+ "SVM16_npp_d20170128_t1243013_e1244238_b27228_c20170128125241285533_cspp_dev.h5",
+]
+
+FILES_20170128_1229 = {
+ "DNB": SVDNB_FILES,
+ "I01": SVI01_FILES,
+ "I02": SVI02_FILES,
+ "I03": SVI03_FILES,
+ "I04": SVI04_FILES,
+ "I05": SVI05_FILES,
+ "M01": SVM01_FILES,
+ "M02": SVM02_FILES,
+ "M03": SVM03_FILES,
+ "M04": SVM04_FILES,
+ "M05": SVM05_FILES,
+ "M06": SVM06_FILES,
+ "M07": SVM07_FILES,
+ "M08": SVM08_FILES,
+ "M09": SVM09_FILES,
+ "M10": SVM10_FILES,
+ "M11": SVM11_FILES,
+ "M12": SVM12_FILES,
+ "M13": SVM13_FILES,
+ "M14": SVM14_FILES,
+ "M15": SVM15_FILES,
+ "M16": SVM16_FILES,
+}
+
+
+def get_viirs_sdr_20170128_1229(
+ base_dir=None,
+ channels=("I01", "I02", "I03", "I04", "I05",
+ "M01", "M02", "M03", "M04", "M05", "M06", "M07", "M08", "M09", "M10",
+ "M11", "M12", "M13", "M14", "M15", "M16",
+ "DNB"),
+ granules=(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)):
+ r"""Get VIIRS SDR files for 2017-01-28 12:29 to 12:43.
+
+ These files are downloaded from Zenodo. You can see the full file
+ listing here: https://zenodo.org/record/263296
+
+ Specific channels can be specified with the ``channels`` keyword argument.
+    By default, all channels (all I bands, all M bands, and the DNB) are
+    downloaded. Channels are referred to by their band type and channel number
+    (e.g. "I01", "M16", or "DNB"). Terrain-corrected geolocation files are
+ always downloaded when the corresponding band data is specified.
+
+    The ``granules`` keyword argument controls which granules ("time steps")
+    are downloaded. There are 10 available, and they can be selected with a
+    tuple of integers from 1 to 10.
+
+ This full dataset is ~10.1GB.
+
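+    Example, as a minimal sketch (assuming this function is exposed via
+    ``satpy.demo`` and network access to Zenodo is available):
+
+    .. code-block:: python
+
+        from satpy.demo import get_viirs_sdr_20170128_1229
+
+        # download only the first two granules of the I01 band
+        files = get_viirs_sdr_20170128_1229(channels=("I01",), granules=(1, 2))
+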
+ Notes:
+        The file list was retrieved using the Zenodo API.
+
+ .. code-block:: python
+
+            import json
+
+            import requests
+
+            viirs_listing = requests.get("https://zenodo.org/api/records/263296")
+            viirs_dict = json.loads(viirs_listing.content)
+            print("\n".join(sorted(x['links']['self'] for x in viirs_dict['files'])))
+
+ """
+ base_dir = base_dir or config.get("demo_data_dir", ".")
+
+ subdir = os.path.join(base_dir, "viirs_sdr", "20170128_1229")
+ os.makedirs(subdir, exist_ok=True)
+ urls = (ZENODO_BASE_URL + fn for fn in _get_filenames_to_download(channels, granules))
+
+ files = []
+ for url in urls:
+ target = os.path.join(subdir, os.path.basename(url))
+ files.append(target)
+ if os.path.isfile(target):
+ logger.info(f"File {target} already exists, skipping...")
+ continue
+ logger.info(f"Downloading file to {target}...")
+ download_url(url, target)
+
+ return files
+
+
+def _get_filenames_to_download(channels, granules):
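+    # Geolocation files are yielded first: GDNBO for the DNB plus the
+    # terrain-corrected GITCO/GMTCO files for any requested I/M bands.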
+ if any("DNB" in chan for chan in channels):
+ yield from _yield_specific_granules(GDNBO_URLS, granules)
+ if any("I" in chan for chan in channels):
+ yield from _yield_specific_granules(GITCO_URLS, granules)
+ if any("M" in chan for chan in channels):
+ yield from _yield_specific_granules(GMTCO_URLS, granules)
+ for channel in channels:
+ yield from _yield_specific_granules(FILES_20170128_1229[channel], granules)
+
+
+def _yield_specific_granules(filenames, granules):
+ for gran_num in granules:
+ yield filenames[gran_num - 1]
diff --git a/satpy/dependency_tree.py b/satpy/dependency_tree.py
index b413e80538..331483cabc 100644
--- a/satpy/dependency_tree.py
+++ b/satpy/dependency_tree.py
@@ -19,15 +19,15 @@
from __future__ import annotations
-from typing import Optional, Iterable, Container
-
-from satpy import DataID
-from satpy.dataset import create_filtered_query, ModifierTuple
-from satpy.dataset.data_dict import TooManyResults, get_key
-from satpy.node import CompositorNode, Node, EMPTY_LEAF_NAME, MissingDependencies, LOG, ReaderNode
+from typing import Container, Iterable, Optional
import numpy as np
+from satpy import DataID, DatasetDict
+from satpy.dataset import ModifierTuple, create_filtered_query
+from satpy.dataset.data_dict import TooManyResults, get_key
+from satpy.node import EMPTY_LEAF_NAME, LOG, CompositorNode, MissingDependencies, Node, ReaderNode
+
class Tree:
"""A tree implementation."""
@@ -155,7 +155,7 @@ class is a subclass of.
"""
- def __init__(self, readers, compositors, modifiers, available_only=False):
+ def __init__(self, readers, compositors=None, modifiers=None, available_only=False):
"""Collect Dataset generating information.
Collect the objects that generate and have information about Datasets
@@ -168,8 +168,10 @@ def __init__(self, readers, compositors, modifiers, available_only=False):
Args:
readers (dict): Reader name -> Reader Object
- compositors (dict): Sensor name -> Composite ID -> Composite Object
- modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options)
+ compositors (dict): Sensor name -> Composite ID -> Composite Object.
+ Empty dictionary by default.
+ modifiers (dict): Sensor name -> Modifier name -> (Modifier Class, modifier options).
+ Empty dictionary by default.
available_only (bool): Whether only reader's available/loadable
datasets should be used when searching for dependencies (True)
or use all known/configured datasets regardless of whether the
@@ -181,9 +183,28 @@ def __init__(self, readers, compositors, modifiers, available_only=False):
"""
super().__init__()
self.readers = readers
- self.compositors = compositors
- self.modifiers = modifiers
+ self.compositors = {}
+ self.modifiers = {}
self._available_only = available_only
+ self.update_compositors_and_modifiers(compositors or {}, modifiers or {})
+
+ def update_compositors_and_modifiers(self, compositors: dict, modifiers: dict) -> None:
+ """Add additional compositors and modifiers to the tree.
+
+        The provided dictionaries and their first-level sub-dictionaries are
+        copied to avoid modifying the input.
+
+ Args:
+ compositors (dict):
+ Sensor name -> composite ID -> Composite Object
+ modifiers (dict):
+ Sensor name -> Modifier name -> (Modifier Class, modifier options)
+
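+        Example (an illustrative sketch; ``comps`` and ``mods`` are assumed
+        to follow the mappings described above)::
+
+            tree = DependencyTree(readers)
+            tree.update_compositors_and_modifiers(comps, mods)
+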
+ """
+ for sensor_name, sensor_comps in compositors.items():
+ self.compositors.setdefault(sensor_name, DatasetDict()).update(sensor_comps)
+ for sensor_name, sensor_mods in modifiers.items():
+ self.modifiers.setdefault(sensor_name, {}).update(sensor_mods)
def copy(self):
"""Copy this node tree.
diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py
index a5609434c3..5e1df97e11 100644
--- a/satpy/enhancements/__init__.py
+++ b/satpy/enhancements/__init__.py
@@ -16,16 +16,21 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Enhancements."""
-import numpy as np
-import xarray as xr
+import logging
+import os
+import warnings
+from collections import namedtuple
+from functools import wraps
+from numbers import Number
+
import dask
import dask.array as da
+import numpy as np
+import xarray as xr
from trollimage.xrimage import XRImage
-from numbers import Number
-import logging
-import warnings
-from functools import partial
+
from satpy._compat import ArrayLike
+from satpy._config import get_config_path
LOG = logging.getLogger(__name__)
@@ -45,67 +50,77 @@ def invert(img, *args):
return img.invert(*args)
-def apply_enhancement(data, func, exclude=None, separate=False,
- pass_dask=False):
- """Apply `func` to the provided data.
+def exclude_alpha(func):
+ """Exclude the alpha channel from the DataArray before further processing."""
+ @wraps(func)
+ def wrapper(data, **kwargs):
+ bands = data.coords['bands'].values
+ exclude = ['A'] if 'A' in bands else []
+ band_data = data.sel(bands=[b for b in bands
+ if b not in exclude])
+ band_data = func(band_data, **kwargs)
- Args:
- data (xarray.DataArray): Data to be modified inplace.
- func (callable): Function to be applied to an xarray
- exclude (iterable): Bands in the 'bands' dimension to not include
- in the calculations.
- separate (bool): Apply `func` one band at a time. Default is False.
- pass_dask (bool): Pass the underlying dask array instead of the
- xarray.DataArray.
+ attrs = data.attrs
+ attrs.update(band_data.attrs)
+ # combine the new data with the excluded data
+ new_data = xr.concat([band_data, data.sel(bands=exclude)],
+ dim='bands')
+ data.data = new_data.sel(bands=bands).data
+ data.attrs = attrs
+ return data
+ return wrapper
- """
- attrs = data.attrs
- bands = data.coords['bands'].values
- if exclude is None:
- exclude = ['A'] if 'A' in bands else []
- if separate:
+def on_separate_bands(func):
+ """Apply `func` one band of the DataArray at a time.
+
+    If this decorator is to be applied along with `on_dask_array`, this decorator has to be applied first, e.g.::
+
+ @on_separate_bands
+ @on_dask_array
+ def my_enhancement_function(data):
+ ...
+
+
+ """
+ @wraps(func)
+ def wrapper(data, **kwargs):
+ attrs = data.attrs
data_arrs = []
- for idx, band_name in enumerate(bands):
- band_data = data.sel(bands=[band_name])
- if band_name in exclude:
- # don't modify alpha
- data_arrs.append(band_data)
- continue
-
- if pass_dask:
- dims = band_data.dims
- coords = band_data.coords
- d_arr = func(band_data.data, index=idx)
- band_data = xr.DataArray(d_arr, dims=dims, coords=coords)
- else:
- band_data = func(band_data, index=idx)
+ for idx, band in enumerate(data.coords['bands'].values):
+ band_data = func(data.sel(bands=[band]), index=idx, **kwargs)
data_arrs.append(band_data)
# we assume that the func can add attrs
attrs.update(band_data.attrs)
-
data.data = xr.concat(data_arrs, dim='bands').data
data.attrs = attrs
return data
- else:
- band_data = data.sel(bands=[b for b in bands
- if b not in exclude])
- if pass_dask:
- dims = band_data.dims
- coords = band_data.coords
- d_arr = func(band_data.data)
- band_data = xr.DataArray(d_arr, dims=dims, coords=coords)
- else:
- band_data = func(band_data)
- attrs.update(band_data.attrs)
- # combine the new data with the excluded data
- new_data = xr.concat([band_data, data.sel(bands=exclude)],
- dim='bands')
- data.data = new_data.sel(bands=bands).data
- data.attrs = attrs
+ return wrapper
+
+
+def on_dask_array(func):
+ """Pass the underlying dask array to *func* instead of the xarray.DataArray."""
+ @wraps(func)
+ def wrapper(data, **kwargs):
+ dims = data.dims
+ coords = data.coords
+ d_arr = func(data.data, **kwargs)
+ return xr.DataArray(d_arr, dims=dims, coords=coords)
+ return wrapper
+
- return data
+def using_map_blocks(func):
+ """Run the provided function using :func:`dask.array.core.map_blocks`.
+
+ This means dask will call the provided function with a single chunk
+ as a numpy array.
+ """
+ @wraps(func)
+ def wrapper(data, **kwargs):
+ return da.map_blocks(func, data, meta=np.array((), dtype=data.dtype), dtype=data.dtype, chunks=data.chunks,
+ **kwargs)
+ return on_dask_array(wrapper)
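+
+
+# Illustrative sketch only (a hypothetical helper, not part of the API):
+# stacking the decorators in this order means ``band_data`` below arrives
+# as a single numpy chunk, one non-alpha band at a time.
+@exclude_alpha
+@on_separate_bands
+@using_map_blocks
+def _example_square_root(band_data, index=None):
+    # clip negatives before the square root; ``index`` is the band index
+    # injected by ``on_separate_bands`` and unused here
+    return np.sqrt(np.clip(band_data, 0, None))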
def crefl_scaling(img, **kwargs):
@@ -181,15 +196,16 @@ def piecewise_linear_stretch(
xp = np.asarray(xp) / reference_scale_factor
fp = np.asarray(fp) / reference_scale_factor
- def func(band_data, xp, fp, index=None):
- # Interpolate band on [0,1] using "lazy" arrays (put calculations off until the end).
- band_data = xr.DataArray(da.clip(band_data.data.map_blocks(np.interp, xp=xp, fp=fp), 0, 1),
- coords=band_data.coords, dims=band_data.dims, name=band_data.name,
- attrs=band_data.attrs)
- return band_data
+ return _piecewise_linear(img.data, xp=xp, fp=fp)
+
- func_with_kwargs = partial(func, xp=xp, fp=fp)
- return apply_enhancement(img.data, func_with_kwargs, separate=True)
+@exclude_alpha
+@using_map_blocks
+def _piecewise_linear(band_data, xp, fp):
+ # Interpolate band on [0,1] using "lazy" arrays (put calculations off until the end).
+ interp_data = np.interp(band_data, xp=xp, fp=fp)
+ interp_data = np.clip(interp_data, 0, 1, out=interp_data)
+ return interp_data
def cira_stretch(img, **kwargs):
@@ -198,18 +214,19 @@ def cira_stretch(img, **kwargs):
Applicable only for visible channels.
"""
LOG.debug("Applying the cira-stretch")
+ return _cira_stretch(img.data)
- def func(band_data):
- log_root = np.log10(0.0223)
- denom = (1.0 - log_root) * 0.75
- band_data *= 0.01
- band_data = band_data.clip(np.finfo(float).eps)
- band_data = np.log10(band_data)
- band_data -= log_root
- band_data /= denom
- return band_data
- return apply_enhancement(img.data, func)
+@exclude_alpha
+def _cira_stretch(band_data):
+ log_root = np.log10(0.0223)
+ denom = (1.0 - log_root) * 0.75
+ band_data *= 0.01
+ band_data = band_data.clip(np.finfo(float).eps)
+ band_data = np.log10(band_data)
+ band_data -= log_root
+ band_data /= denom
+ return band_data
def reinhard_to_srgb(img, saturation=1.25, white=100, **kwargs):
@@ -260,26 +277,20 @@ def _srgb_gamma(arr):
return da.where(arr < 0.0031308, arr * 12.92, 1.055 * arr ** 0.41666 - 0.055)
-def _lookup_delayed(luts, band_data):
- # can't use luts.__getitem__ for some reason
- return luts[band_data]
-
-
def lookup(img, **kwargs):
"""Assign values to channels based on a table."""
luts = np.array(kwargs['luts'], dtype=np.float32) / 255.0
+ return _lookup_table(img.data, luts=luts)
- def func(band_data, luts=luts, index=-1):
- # NaN/null values will become 0
- lut = luts[:, index] if len(luts.shape) == 2 else luts
- band_data = band_data.clip(0, lut.size - 1).astype(np.uint8)
- new_delay = dask.delayed(_lookup_delayed)(lut, band_data)
- new_data = da.from_delayed(new_delay, shape=band_data.shape,
- dtype=luts.dtype)
- return new_data
-
- return apply_enhancement(img.data, func, separate=True, pass_dask=True)
+@exclude_alpha
+@on_separate_bands
+@using_map_blocks
+def _lookup_table(band_data, luts=None, index=-1):
+ # NaN/null values will become 0
+ lut = luts[:, index] if len(luts.shape) == 2 else luts
+ band_data = band_data.clip(0, lut.size - 1).astype(np.uint8)
+ return lut[band_data]
def colorize(img, **kwargs):
@@ -352,10 +363,21 @@ def create_colormap(palette):
**From a file**
- Colormaps can be loaded from ``.npy`` files as 2D raw arrays with rows for
- each color. The filename to load can be provided with the ``filename`` key
- in the provided palette information. The colormap is interpreted as 1 of 4
- different "colormap modes": ``RGB``, ``RGBA``, ``VRGB``, or ``VRGBA``. The
+ Colormaps can be loaded from ``.npy``, ``.npz``, or comma-separated text
+ files. Numpy (npy/npz) files should be 2D arrays with rows for each color.
+ Comma-separated files should have a row for each color with each column
+ representing a single value/channel. The filename to load can be provided
+ with the ``filename`` key in the provided palette information. A filename
+ ending with ``.npy`` or ``.npz`` is read as a numpy file with
+    :func:`numpy.load`. Files with any other extension are
+    read as comma-separated text. For ``.npz`` files the data must be stored
+ as a positional list where the first element represents the colormap to
+ use. See :func:`numpy.savez` for more information. The path to the
+ colormap can be relative if it is stored in a directory specified by
+ :ref:`config_path_setting`. Otherwise it should be an absolute path.
+
+ The colormap is interpreted as 1 of 4 different "colormap modes":
+ ``RGB``, ``RGBA``, ``VRGB``, or ``VRGBA``. The
colormap mode can be forced with the ``colormap_mode`` key in the provided
palette information. If it is not provided then a default will be chosen
based on the number of columns in the array (3: RGB, 4: VRGB, 5: VRGBA).
@@ -411,52 +433,18 @@ def create_colormap(palette):
information.
"""
- from trollimage.colormap import Colormap
fname = palette.get('filename', None)
colors = palette.get('colors', None)
# are colors between 0-255 or 0-1
color_scale = palette.get('color_scale', 255)
if fname:
- data = np.load(fname)
- cols = data.shape[1]
- default_modes = {
- 3: 'RGB',
- 4: 'VRGB',
- 5: 'VRGBA'
- }
- default_mode = default_modes.get(cols)
- mode = palette.setdefault('colormap_mode', default_mode)
- if mode is None or len(mode) != cols:
- raise ValueError(
- "Unexpected colormap shape for mode '{}'".format(mode))
-
- rows = data.shape[0]
- if mode[0] == 'V':
- colors = data[:, 1:]
- if color_scale != 1:
- colors = data[:, 1:] / float(color_scale)
- values = data[:, 0]
- else:
- colors = data
- if color_scale != 1:
- colors = colors / float(color_scale)
- values = np.arange(rows) / float(rows - 1)
- cmap = Colormap(*zip(values, colors))
+ cmap = _create_colormap_from_file(fname, palette, color_scale)
elif isinstance(colors, (tuple, list)):
- cmap = []
- values = palette.get('values', None)
- for idx, color in enumerate(colors):
- if values is not None:
- value = values[idx]
- else:
- value = idx / float(len(colors) - 1)
- if color_scale != 1:
- color = tuple(elem / float(color_scale) for elem in color)
- cmap.append((value, tuple(color)))
- cmap = Colormap(*cmap)
+ cmap = _create_colormap_from_sequence(colors, palette, color_scale)
elif isinstance(colors, str):
- from trollimage import colormap
import copy
+
+ from trollimage import colormap
cmap = copy.copy(getattr(colormap, colors))
else:
raise ValueError("Unknown colormap format: {}".format(palette))
@@ -471,12 +459,62 @@ def create_colormap(palette):
return cmap
-def _three_d_effect_delayed(band_data, kernel, mode):
- """Kernel for running delayed 3D effect creation."""
- from scipy.signal import convolve2d
- band_data = band_data.reshape(band_data.shape[1:])
- new_data = convolve2d(band_data, kernel, mode=mode)
- return new_data.reshape((1, band_data.shape[0], band_data.shape[1]))
+def _create_colormap_from_sequence(colors, palette, color_scale):
+ from trollimage.colormap import Colormap
+ cmap = []
+ values = palette.get('values', None)
+ for idx, color in enumerate(colors):
+ if values is not None:
+ value = values[idx]
+ else:
+ value = idx / float(len(colors) - 1)
+ if color_scale != 1:
+ color = tuple(elem / float(color_scale) for elem in color)
+ cmap.append((value, tuple(color)))
+ return Colormap(*cmap)
+
+
+def _create_colormap_from_file(filename, palette, color_scale):
+ from trollimage.colormap import Colormap
+ data = _read_colormap_data_from_file(filename)
+ cols = data.shape[1]
+ default_modes = {
+ 3: 'RGB',
+ 4: 'VRGB',
+ 5: 'VRGBA'
+ }
+ default_mode = default_modes.get(cols)
+ mode = palette.setdefault('colormap_mode', default_mode)
+ if mode is None or len(mode) != cols:
+ raise ValueError(
+ "Unexpected colormap shape for mode '{}'".format(mode))
+ rows = data.shape[0]
+ if mode[0] == 'V':
+ colors = data[:, 1:]
+ if color_scale != 1:
+ colors = data[:, 1:] / float(color_scale)
+ values = data[:, 0]
+ else:
+ colors = data
+ if color_scale != 1:
+ colors = colors / float(color_scale)
+ values = np.arange(rows) / float(rows - 1)
+ return Colormap(*zip(values, colors))
+
+
+def _read_colormap_data_from_file(filename):
+ if not os.path.exists(filename):
+ filename = get_config_path(filename)
+ ext = os.path.splitext(filename)[1]
+ if ext in (".npy", ".npz"):
+ file_content = np.load(filename)
+ if ext == ".npz":
+ # .npz is a collection
+        # assume positional list-like and get the first element
+ file_content = file_content["arr_0"]
+ return file_content
+ # CSV
+ return np.loadtxt(filename, delimiter=",")
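+
+
+def _example_file_palette():  # illustrative sketch only, not public API
+    """Build a colormap from a hypothetical ``my_colors.npy`` file.
+
+    The file is assumed to hold a 2D array with value + RGB columns
+    (mode ``VRGB``) scaled 0-255; a relative path resolves against the
+    configured config paths.
+    """
+    palette = {"filename": "my_colors.npy",
+               "colormap_mode": "VRGB",
+               "color_scale": 255}
+    return create_colormap(palette)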
def three_d_effect(img, **kwargs):
@@ -487,15 +525,26 @@ def three_d_effect(img, **kwargs):
[-w, 1, w],
[-w, 0, w]])
mode = kwargs.get('convolve_mode', 'same')
+ return _three_d_effect(img.data, kernel=kernel, mode=mode)
- def func(band_data, kernel=kernel, mode=mode, index=None):
- del index
- delay = dask.delayed(_three_d_effect_delayed)(band_data, kernel, mode)
- new_data = da.from_delayed(delay, shape=band_data.shape, dtype=band_data.dtype)
- return new_data
+@exclude_alpha
+@on_separate_bands
+@on_dask_array
+def _three_d_effect(band_data, kernel=None, mode=None, index=None):
+ del index
- return apply_enhancement(img.data, func, separate=True, pass_dask=True)
+ delay = dask.delayed(_three_d_effect_delayed)(band_data, kernel, mode)
+ new_data = da.from_delayed(delay, shape=band_data.shape, dtype=band_data.dtype)
+ return new_data
+
+
+def _three_d_effect_delayed(band_data, kernel, mode):
+ """Kernel for running delayed 3D effect creation."""
+ from scipy.signal import convolve2d
+ band_data = band_data.reshape(band_data.shape[1:])
+ new_data = convolve2d(band_data, kernel, mode=mode)
+ return new_data.reshape((1, band_data.shape[0], band_data.shape[1]))
def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs):
@@ -524,10 +573,20 @@ def btemp_threshold(img, min_in, max_in, threshold, threshold_out=None, **kwargs
high_factor = threshold_out / (max_in - threshold)
high_offset = high_factor * max_in
- def _bt_threshold(band_data):
- # expects dask array to be passed
- return da.where(band_data >= threshold,
- high_offset - high_factor * band_data,
- low_offset - low_factor * band_data)
+ Coeffs = namedtuple("Coeffs", "factor offset")
+ high = Coeffs(high_factor, high_offset)
+ low = Coeffs(low_factor, low_offset)
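+    # one factor/offset pair applies above the threshold and the other below
+    # it, giving a piecewise-linear brightness temperature stretch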
+
+ return _bt_threshold(img.data,
+ threshold=threshold,
+ high_coeffs=high,
+ low_coeffs=low)
+
- return apply_enhancement(img.data, _bt_threshold, pass_dask=True)
+@exclude_alpha
+@using_map_blocks
+def _bt_threshold(band_data, threshold, high_coeffs, low_coeffs):
+ # expects dask array to be passed
+ return np.where(band_data >= threshold,
+ high_coeffs.offset - high_coeffs.factor * band_data,
+ low_coeffs.offset - low_coeffs.factor * band_data)
diff --git a/satpy/enhancements/abi.py b/satpy/enhancements/abi.py
index da246f51d9..ca19b4b252 100644
--- a/satpy/enhancements/abi.py
+++ b/satpy/enhancements/abi.py
@@ -16,29 +16,32 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Enhancement functions specific to the ABI sensor."""
-from satpy.enhancements import apply_enhancement
+from satpy.enhancements import exclude_alpha, using_map_blocks
def cimss_true_color_contrast(img, **kwargs):
"""Scale data based on CIMSS True Color recipe for AWIPS."""
- def func(img_data):
- """Perform per-chunk enhancement.
+ _cimss_true_color_contrast(img.data)
- Code ported from Kaba Bah's AWIPS python plugin for creating the
- CIMSS Natural (True) Color image in AWIPS. AWIPS provides that python
- code the image data on a 0-255 scale. Satpy gives this function the
- data on a 0-1.0 scale (assuming linear stretching and sqrt
- enhancements have already been applied).
- """
- max_value = 1.0
- acont = (255.0 / 10.0) / 255.0
- amax = (255.0 + 4.0) / 255.0
- amid = 1.0 / 2.0
- afact = (amax * (acont + max_value) / (max_value * (amax - acont)))
- aband = (afact * (img_data - amid) + amid)
- aband[aband <= 10 / 255.0] = 0
- aband[aband >= 1.0] = 1.0
- return aband
+@exclude_alpha
+@using_map_blocks
+def _cimss_true_color_contrast(img_data):
+ """Perform per-chunk enhancement.
- apply_enhancement(img.data, func, pass_dask=True)
+ Code ported from Kaba Bah's AWIPS python plugin for creating the
+ CIMSS Natural (True) Color image in AWIPS. AWIPS provides that python
+ code the image data on a 0-255 scale. Satpy gives this function the
+ data on a 0-1.0 scale (assuming linear stretching and sqrt
+ enhancements have already been applied).
+
+ """
+ max_value = 1.0
+ acont = (255.0 / 10.0) / 255.0
+ amax = (255.0 + 4.0) / 255.0
+ amid = 1.0 / 2.0
+ afact = (amax * (acont + max_value) / (max_value * (amax - acont)))
+ aband = (afact * (img_data - amid) + amid)
+ aband[aband <= 10 / 255.0] = 0
+ aband[aband >= 1.0] = 1.0
+ return aband
diff --git a/satpy/enhancements/ahi.py b/satpy/enhancements/ahi.py
new file mode 100644
index 0000000000..a0f332cfa2
--- /dev/null
+++ b/satpy/enhancements/ahi.py
@@ -0,0 +1,46 @@
+#!/usr/bin/env python
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Enhancement functions specific to the AHI sensor."""
+import dask.array as da
+import numpy as np
+
+from satpy.enhancements import exclude_alpha, on_dask_array
+
+
+def jma_true_color_reproduction(img, **kwargs):
+    """Apply the CIE XYZ color conversion matrix for True Color Reproduction.
+
+ Himawari-8 True Color Reproduction Approach Based on the CIE XYZ Color System
+ Hidehiko MURATA, Kotaro SAITOH, and Yasuhiko SUMIDA
+ Meteorological Satellite Center, Japan Meteorological Agency
+ NOAA National Environmental Satellite, Data, and Information Service
+ Colorado State University—CIRA
+ https://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
+ """
+ _jma_true_color_reproduction(img.data)
+
+
+@exclude_alpha
+@on_dask_array
+def _jma_true_color_reproduction(img_data):
+ ccm = np.array([
+ [1.1759, 0.0561, -0.1322],
+ [-0.0386, 0.9587, 0.0559],
+ [-0.0189, -0.1161, 1.0777]
+ ])
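+    # img_data is laid out (bands, y, x); transposing puts the band axis
+    # last so each pixel's RGB vector is multiplied by the matrix, then the
+    # result is transposed back to (bands, y, x)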
+ output = da.dot(img_data.T, ccm.T)
+ return output.T
diff --git a/satpy/enhancements/atmosphere.py b/satpy/enhancements/atmosphere.py
new file mode 100644
index 0000000000..bbc4bc3a86
--- /dev/null
+++ b/satpy/enhancements/atmosphere.py
@@ -0,0 +1,110 @@
+# Copyright (c) 2022- Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Enhancements related to visualising atmospheric phenomena."""
+
+import datetime
+
+import dask.array as da
+import xarray as xr
+
+
+def essl_moisture(img, low=1.1, high=1.6) -> None:
+ r"""Low level moisture by European Severe Storms Laboratory (ESSL).
+
+ Expects a mode L image with data corresponding to the ratio of the
+    calibrated reflectances for the 0.86 µm and 0.905 µm channels.
+
+ This composite and its colorisation were developed by ESSL.
+
+    Ratio values are scaled from the range ``[low, high]`` to ``[0, 1]``;
+    the default range of 1.1 to 1.6 might be tuned based on region or
+    sensor. Values outside this range are clipped. Color values
+ for red, green, and blue are calculated as follows, where ``x`` is the
+ ratio between the 0.86 µm and 0.905 µm channels:
+
+ .. math::
+
+ R = \max(1.375 - 2.67 x, -0.75 + x) \\
+ G = 1 - \frac{8x}{7} \\
+ B = \max(0.75 - 1.5 x, 0.25 - (x - 0.75)^2) \\
+
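+    For example, a raw ratio of 1.35 scales to :math:`x = 0.5`, giving
+    approximately :math:`R = 0.04`, :math:`G = 0.43` and :math:`B = 0.19`
+    before clipping, i.e. mid-range moisture maps to a dark greenish colour.
+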
+ The value of ``img.data`` is modified in-place.
+
+ A color interpretation guide is pending further adjustments to the
+ parameters for current and future sensors.
+
+ Args:
+ img: XRImage containing the relevant composite
+ low: optional, low end for scaling, defaults to 1.1
+ high: optional, high end for scaling, defaults to 1.6
+ """
+ ratio = img.data
+ if _is_fci_test_data(img.data):
+ # Due to a bug in the FCI pre-launch simulated test data,
+ # the 0.86 µm channel is too bright. To correct for this, its
+ # reflectances should be multiplied by 0.8.
+ ratio *= 0.8
+
+ with xr.set_options(keep_attrs=True):
+ ratio = _scale_and_clip(ratio, low, high)
+ red = _calc_essl_red(ratio)
+ green = _calc_essl_green(ratio)
+ blue = _calc_essl_blue(ratio)
+ data = xr.concat([red, green, blue], dim="bands")
+ data.attrs["mode"] = "RGB"
+ data["bands"] = ["R", "G", "B"]
+ img.data = data
+
+
+def _scale_and_clip(ratio, low, high):
+ """Scale ratio values to [0, 1] and clip values outside this range."""
+ scaled = (ratio - low) / (high - low)
+ scaled.data = da.clip(scaled.data, 0, 1)
+ return scaled
+
+
+def _calc_essl_red(ratio):
+ """Calculate values for red based on scaled and clipped ratio."""
+ red_a = 1.375 - 2.67 * ratio
+ red_b = -0.75 + ratio
+ red = xr.where(red_a > red_b, red_a, red_b)
+ red.data = da.clip(red.data, 0, 1)
+ return red
+
+
+def _calc_essl_green(ratio):
+ """Calculate values for green based on scaled and clipped ratio."""
+ green = 1 - (8/7) * ratio
+ green.data = da.clip(green.data, 0, 1)
+ return green
+
+
+def _calc_essl_blue(ratio):
+ """Calculate values for blue based on scaled and clipped ratio."""
+ blue_a = 0.75 - 1.5 * ratio
+ blue_b = 0.25 - (ratio - 0.75)**2
+ blue = xr.where(blue_a > blue_b, blue_a, blue_b)
+ blue.data = da.clip(blue.data, 0, 1)
+ return blue
+
+
+def _is_fci_test_data(data):
+ """Check if we are working with FCI test data."""
+ return ("sensor" in data.attrs and
+ "start_time" in data.attrs and
+ data.attrs["sensor"] == "fci" and
+ isinstance(data.attrs["start_time"], datetime.datetime) and
+ data.attrs["start_time"] < datetime.datetime(2022, 11, 30))
diff --git a/satpy/enhancements/viirs.py b/satpy/enhancements/viirs.py
index 4d2acd376a..627fc80220 100644
--- a/satpy/enhancements/viirs.py
+++ b/satpy/enhancements/viirs.py
@@ -15,9 +15,10 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Enhancements specific to the VIIRS instrument."""
-from trollimage.colormap import Colormap
import numpy as np
-from satpy.enhancements import apply_enhancement
+from trollimage.colormap import Colormap
+
+from satpy.enhancements import exclude_alpha, using_map_blocks
def water_detection(img, **kwargs):
@@ -29,14 +30,17 @@ def water_detection(img, **kwargs):
palette = kwargs['palettes']
palette['colors'] = tuple(map(tuple, palette['colors']))
- def func(img_data):
- data = np.asarray(img_data)
- data[data == 150] = 31
- data[data == 199] = 18
- data[data >= 200] = data[data >= 200] - 100
-
- return data
-
- apply_enhancement(img.data, func, pass_dask=True)
+ _water_detection(img.data)
cm = Colormap(*palette['colors'])
img.palettize(cm)
+
+
+@exclude_alpha
+@using_map_blocks
+def _water_detection(img_data):
+ data = np.asarray(img_data).copy()
+ data[data == 150] = 31
+ data[data == 199] = 18
+ data[data >= 200] = data[data >= 200] - 100
+
+ return data
diff --git a/satpy/etc/areas.yaml b/satpy/etc/areas.yaml
index f1486bd98e..04233d059d 100644
--- a/satpy/etc/areas.yaml
+++ b/satpy/etc/areas.yaml
@@ -114,6 +114,160 @@ msg_seviri_iodc_1km:
lower_left_xy: [-5571248.412732527, -5566247.740968115]
upper_right_xy: [5566247.740968115, 5571248.412732527]
+# Full disk - segmented products
+msg_seviri_fes_9km:
+ description:
+ MSG SEVIRI Full Earth Scanning service area definition
+ with 9 km resolution
+ projection:
+ proj: geos
+ lon_0: 0.0
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 1237
+ width: 1237
+ area_extent:
+ lower_left_xy: [-5567248.28351984, -5567248.28340708]
+    upper_right_xy: [5567248.28340708, 5567248.28351984]
+
+msg_seviri_rss_9km:
+ description:
+ MSG SEVIRI Rapid Scanning Service area definition
+ with 9 km resolution
+ projection:
+ proj: geos
+ lon_0: 9.5
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 1237
+ width: 1237
+ area_extent:
+ lower_left_xy: [-5567248.28351984, -5567248.28340708]
+    upper_right_xy: [5567248.28340708, 5567248.28351984]
+
+msg_seviri_iodc_9km:
+ description:
+ MSG SEVIRI Indian Ocean Data Coverage service area definition
+ with 9 km resolution
+ projection:
+ proj: geos
+ lon_0: 41.5
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 1237
+ width: 1237
+ area_extent:
+ lower_left_xy: [-5567248.28351984, -5567248.28340708]
+    upper_right_xy: [5567248.28340708, 5567248.28351984]
+
+msg_seviri_fes_9km_ext:
+ description:
+ MSG SEVIRI Full Earth Scanning service area definition
+ with 9 km resolution (extended outside original 3km grid)
+ projection:
+ proj: geos
+ lon_0: 0.0
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 1238
+ width: 1238
+ area_extent:
+ lower_left_xy: [-5571748.888268564, -5571748.888155806]
+ upper_right_xy: [5571748.888155806, 5571748.888268564]
+
+msg_seviri_rss_9km_ext:
+ description:
+ MSG SEVIRI Rapid Scanning Service area definition
+ with 9 km resolution (extended outside original 3km grid)
+ projection:
+ proj: geos
+ lon_0: 9.5
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 1238
+ width: 1238
+ area_extent:
+ lower_left_xy: [-5571748.888268564, -5571748.888155806]
+ upper_right_xy: [5571748.888155806, 5571748.888268564]
+
+msg_seviri_iodc_9km_ext:
+ description:
+ MSG SEVIRI Indian Ocean Data Coverage service area definition
+ with 9 km resolution (extended outside original 3km grid)
+ projection:
+ proj: geos
+ lon_0: 41.5
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 1238
+ width: 1238
+ area_extent:
+ lower_left_xy: [-5571748.888268564, -5571748.888155806]
+ upper_right_xy: [5571748.888155806, 5571748.888268564]
+
+msg_seviri_fes_48km:
+ description:
+ MSG SEVIRI Full Earth Scanning service area definition
+ with 48 km resolution
+ projection:
+ proj: geos
+ lon_0: 0.0
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 232
+ width: 232
+ area_extent:
+ lower_left_xy: [-5570248.686685662, -5567248.28340708]
+ upper_right_xy: [5567248.28340708, 5570248.686685662]
+
+msg_seviri_rss_48km:
+ description:
+ MSG SEVIRI Rapid Scanning Service area definition
+ with 48 km resolution
+ projection:
+ proj: geos
+ lon_0: 9.5
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 232
+ width: 232
+ area_extent:
+ lower_left_xy: [-5570248.686685662, -5567248.28340708]
+ upper_right_xy: [5567248.28340708, 5570248.686685662]
+
+msg_seviri_iodc_48km:
+ description:
+ MSG SEVIRI Indian Ocean Data Coverage service area definition
+ with 48 km resolution
+ projection:
+ proj: geos
+ lon_0: 41.5
+ a: 6378169.0
+ b: 6356583.8
+ h: 35785831.0
+ shape:
+ height: 232
+ width: 232
+ area_extent:
+ lower_left_xy: [-5570248.686685662, -5567248.28340708]
+ upper_right_xy: [5567248.28340708, 5570248.686685662]
+
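+# These segmented-product areas can be loaded by name, e.g. (illustrative):
+#   from satpy.resample import get_area_def
+#   areadef = get_area_def("msg_seviri_fes_9km")
+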
# Regional
@@ -133,7 +287,7 @@ EuropeCanary:
upper_right_xy: [4178061.408400173, 5570248.477339261]
EastEurope:
- description: Easten part of Northern disk MSG image 0 degrees
+ description: Eastern part of Northern disk MSG image 0 degrees
projection:
proj: geos
lon_0: 0.0
@@ -221,6 +375,293 @@ mtg_fci_fdss_2km:
upper_right_xy: [5567999.994206558, 5567999.994206558]
units: m
+# Full disk - segmented products
+mtg_fci_fdss_6km:
+ description:
+ MTG FCI Full Disk Scanning Service area definition
+ with 6 km resolution
+ projection:
+ proj: geos
+ lon_0: 0
+ h: 35786400
+ x_0: 0
+ y_0: 0
+ ellps: WGS84
+ no_defs: null
+ shape:
+ height: 1856
+ width: 1856
+ area_extent:
+ lower_left_xy: [-5567999.994200589, -5567999.994200589]
+ upper_right_xy: [5567999.994206558, 5567999.994206558]
+ units: m
+
+mtg_fci_fdss_32km:
+ description:
+ MTG FCI Full Disk Scanning Service area definition
+ with 32 km resolution
+ projection:
+ proj: geos
+ lon_0: 0
+ h: 35786400
+ x_0: 0
+ y_0: 0
+ ellps: WGS84
+ no_defs: null
+ shape:
+ height: 348
+ width: 348
+ area_extent:
+ lower_left_xy: [-5567999.994200589, -5567999.994200589]
+ upper_right_xy: [5567999.994206558, 5567999.994206558]
+ units: m
+
+# Geostationary Operational Environmental Satellite (GOES) / ABI Instrument
+
+# Full disk
+
+goes_east_abi_f_500m:
+ description: GOES East ABI Full Disk at 500 m SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -75
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 21696
+ width: 21696
+ area_extent:
+ lower_left_xy: [-5434894.885056, -5434894.885056]
+ upper_right_xy: [5434894.885056, 5434894.885056]
+ units: m
+
+goes_east_abi_f_1km:
+ description: GOES East ABI Full Disk at 1 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -75
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 10848
+ width: 10848
+ area_extent:
+ lower_left_xy: [-5434894.885056, -5434894.885056]
+ upper_right_xy: [5434894.885056, 5434894.885056]
+ units: m
+
+goes_east_abi_f_2km:
+ description: GOES East ABI Full Disk at 2 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -75
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 5424
+ width: 5424
+ area_extent:
+ lower_left_xy: [-5434894.885056, -5434894.885056]
+ upper_right_xy: [5434894.885056, 5434894.885056]
+ units: m
+
+goes_west_abi_f_500m:
+ description: GOES West ABI Full Disk at 500 m SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -137
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 21696
+ width: 21696
+ area_extent:
+ lower_left_xy: [-5434894.885056, -5434894.885056]
+ upper_right_xy: [5434894.885056, 5434894.885056]
+ units: m
+
+goes_west_abi_f_1km:
+ description: GOES West ABI Full Disk at 1 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -137
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 10848
+ width: 10848
+ area_extent:
+ lower_left_xy: [-5434894.885056, -5434894.885056]
+ upper_right_xy: [5434894.885056, 5434894.885056]
+ units: m
+
+goes_west_abi_f_2km:
+ description: GOES West ABI Full Disk at 2 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -137
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 5424
+ width: 5424
+ area_extent:
+ lower_left_xy: [-5434894.885056, -5434894.885056]
+ upper_right_xy: [5434894.885056, 5434894.885056]
+ units: m
+
+# Regional
+
+goes_east_abi_c_500m:
+ description: GOES East ABI CONUS at 500 m SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -75
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 6000
+ width: 10000
+ area_extent:
+ lower_left_xy: [-3627271.29128, 1583173.65752]
+ upper_right_xy: [1382771.92872, 4589199.58952]
+ units: m
+
+goes_east_abi_c_1km:
+ description: GOES East ABI CONUS at 1 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -75
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 3000
+ width: 5000
+ area_extent:
+ lower_left_xy: [-3627271.29128, 1583173.65752]
+ upper_right_xy: [1382771.92872, 4589199.58952]
+ units: m
+
+goes_east_abi_c_2km:
+ description: GOES East ABI CONUS at 2 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -75
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 1500
+ width: 2500
+ area_extent:
+ lower_left_xy: [-3627271.29128, 1583173.65752]
+ upper_right_xy: [1382771.92872, 4589199.58952]
+ units: m
+
+goes_west_abi_p_500m:
+  description: GOES West ABI PACUS at 500 m SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -137
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 6000
+ width: 10000
+ area_extent:
+ lower_left_xy: [-2505021.61, 1583173.65752]
+ upper_right_xy: [2505021.61, 4589199.58952]
+ units: m
+
+goes_west_abi_p_1km:
+ description: GOES West ABI PACUS at 1 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -137
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 3000
+ width: 5000
+ area_extent:
+ lower_left_xy: [-2505021.61, 1583173.65752]
+ upper_right_xy: [2505021.61, 4589199.58952]
+ units: m
+
+goes_west_abi_p_2km:
+  description: GOES West ABI PACUS at 2 km SSP resolution
+ projection:
+ proj: geos
+ sweep: x
+ lon_0: -137
+ h: 35786023
+ x_0: 0
+ y_0: 0
+ ellps: GRS80
+ no_defs: null
+ type: crs
+ shape:
+ height: 1500
+ width: 2500
+ area_extent:
+ lower_left_xy: [-2505021.61, 1583173.65752]
+ upper_right_xy: [2505021.61, 4589199.58952]
+ units: m
+
# -----------------------------------------------------------------------------
# ------------------------- Miscellaneous Areas -------------------------------
# -----------------------------------------------------------------------------
@@ -285,27 +726,13 @@ sve:
description:
Sweden and baltic sea
projection:
- init: epsg:3006
+ EPSG: 3006
shape:
height: 2000
width: 2000
area_extent:
lower_left_xy: [-342379.698, 6032580.06]
upper_right_xy: [1423701.52, 8029648.75]
-iber:
- description:
- North half of the Iberian Peninsula and the Gulf of Biscay
- image 0 degrees
- projection:
- proj: utm
- shape:
- height: 1000
- width: 2000
- area_extent:
- lower_left_xy: [-342379.698, 4432580.06]
- upper_right_xy: [723701.52, 5029648.75]
- units: m
-
brazil2:
description: brazil, platecarree
projection:
diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml
index a0cd4f18c2..0875366616 100644
--- a/satpy/etc/composites/abi.yaml
+++ b/satpy/etc/composites/abi.yaml
@@ -3,7 +3,8 @@ sensor_name: visir/abi
modifiers:
rayleigh_corrected_crefl:
modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector
- dem_filename: CMGDEM.hdf
+ url: "https://www.ssec.wisc.edu/~davidh/polar2grid/viirs_crefl/CMGDEM.hdf"
+ known_hash: "sha256:f33f1f867d79fff4fafe128f61c154236dd74fcc97bf418ea1437977a38d0604"
optional_prerequisites:
- name: satellite_azimuth_angle
- name: satellite_zenith_angle
@@ -194,7 +195,7 @@ composites:
modifiers: [sunz_corrected, rayleigh_corrected]
- name: C03
modifiers: [sunz_corrected]
- standard_name: toa_bidirection_reflectance
+ standard_name: toa_bidirectional_reflectance
cimss_green_sunz:
compositor: !!python/name:satpy.composites.abi.SimulatedGreen
@@ -207,7 +208,7 @@ composites:
modifiers: [sunz_corrected]
- name: C03
modifiers: [sunz_corrected]
- standard_name: toa_bidirection_reflectance
+ standard_name: toa_bidirectional_reflectance
cimss_green:
compositor: !!python/name:satpy.composites.abi.SimulatedGreen
@@ -217,7 +218,7 @@ composites:
- name: C01
- name: C02
- name: C03
- standard_name: toa_bidirection_reflectance
+ standard_name: toa_bidirectional_reflectance
cimss_true_color_sunz_rayleigh:
compositor: !!python/name:satpy.composites.SelfSharpenedRGB
@@ -271,15 +272,38 @@ composites:
- name: C01
standard_name: cimss_true_color
+ true_color_with_night_fires_nocorr:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: true_color_with_night_fires
+ prerequisites:
+ - true_color_nocorr
+ - cira_fire_temperature
+
+ true_color_with_night_fires:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: true_color_with_night_fires
+ prerequisites:
+ - true_color
+ - cira_fire_temperature
+
true_color_with_night_ir:
compositor: !!python/name:satpy.composites.DayNightCompositor
standard_name: true_color_with_night_ir
- lim_low: 90.0
- lim_high: 100.0
+ lim_low: 80
+ lim_high: 90
prerequisites:
- true_color
- night_ir_with_background
+ natural_color_raw_with_night_ir:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: natural_color_with_night_ir
+ lim_low: 80
+ lim_high: 90
+ prerequisites:
+ - natural_color_raw
+ - cloudtop
+
true_color_with_night_ir_hires:
compositor: !!python/name:satpy.composites.DayNightCompositor
standard_name: true_color_with_night_ir_hires
@@ -307,7 +331,7 @@ composites:
night_ir_with_background_hires:
compositor: !!python/name:satpy.composites.BackgroundCompositor
- standard_name: night_ir_with_background_hires
+ standard_name: night_ir_with_background
prerequisites:
- night_ir_alpha
- _night_background_hires
@@ -321,7 +345,7 @@ composites:
appear red. With increasing intensity and temperature the fires will also be detected
by the 2.2 μm and 1.6 μm bands, so very intense fires appear white.
references:
- Research Article: http://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
+ Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
prerequisites:
- name: C07
- name: C06
@@ -335,7 +359,7 @@ composites:
indicative of severe storms. Bright yellow in the RGB indicates strong updrafts prior
to the mature storm stage.
references:
- Research Article: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf
+ Research Article: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf
prerequisites:
- compositor: !!python/name:satpy.composites.DifferenceCompositor
prerequisites:
@@ -363,11 +387,12 @@ composites:
- name: C04
- name: C02
- name: C05
+
ash:
description: >
Ash RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/GOES_Ash_RGB.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- compositor: !!python/name:satpy.composites.DifferenceCompositor
@@ -385,7 +410,7 @@ composites:
description: >
Dust RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Dust_RGB_Quick_Guide.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- compositor: !!python/name:satpy.composites.DifferenceCompositor
@@ -404,8 +429,8 @@ composites:
Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA
Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html)
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Day_Cloud_Phase_Distinction.pdf
- Cloud Type recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13_A&RevisionSelectionMethod=LatestReleased&Rendition=Web
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf
+ Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659
## it uses the default used in etc/enhancements/generic.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -430,7 +455,7 @@ composites:
description: >
Simple Water Vapor RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Simple_Water_Vapor_RGB.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Simple_Water_Vapor_RGB.pdf
## it uses the default used in etc/enhancements/generic.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -443,7 +468,7 @@ composites:
description: >
Differential Water Vapor RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DifferentialWaterVaporRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DifferentialWaterVaporRGB_final.pdf
## it uses the default used in etc/enhancements/generic.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -459,7 +484,7 @@ composites:
description: >
Day Convection RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayConvectionRGB_final.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- compositor: !!python/name:satpy.composites.DifferenceCompositor
@@ -480,7 +505,7 @@ composites:
description: >
SO2 RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Quick_Guide_SO2_RGB.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- compositor: !!python/name:satpy.composites.DifferenceCompositor
@@ -498,7 +523,7 @@ composites:
description: >
Day Snow-Fog RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFog.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf
    ## it uses the snow_default enhancement from etc/enhancements/generic.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -516,7 +541,7 @@ composites:
description: >
Nighttime Microphysics RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_NtMicroRGB_Final_20191206.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- compositor: !!python/name:satpy.composites.DifferenceCompositor
@@ -534,7 +559,7 @@ composites:
description: >
Fire Temperature RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
## adapted from etc/composites/viirs.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -548,7 +573,7 @@ composites:
description: >
Day Land Cloud Fire RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- name: C06
@@ -562,7 +587,7 @@ composites:
description: >
Day Land Cloud RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- name: C05
@@ -618,12 +643,10 @@ composites:
cloud_phase:
description: >
- Cloud Phase RGB, for EUMETSAT
- Day Cloud Phase RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_2861499.html)
- "When we use the NIR2.3 instead of the VIS0.8 on the green beam, we can devise a new RGB product (let us call it 'Day Cloud Phase RGB') that has similar cloud colours than the Natural Colour RGB, but with improved separation of ice and water clouds."
+ EUMETSAT Cloud Phase RGB product
references:
EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf
- Cloud Phase recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13&RevisionSelectionMethod=LatestReleased&Rendition=Web
+      Recipe: http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- name: C05
@@ -632,17 +655,17 @@ composites:
modifiers: [sunz_corrected]
- name: C02
modifiers: [sunz_corrected, rayleigh_corrected]
- standard_name: natural_color
+ standard_name: cloud_phase
cloud_phase_raw:
description: >
- same as cloud_phase
+      same as cloud_phase RGB product, without modifiers
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- name: C05
- name: C06
- name: C02
- standard_name: natural_color
+ standard_name: cloud_phase
tropical_airmass:
description: >
diff --git a/satpy/etc/composites/agri.yaml b/satpy/etc/composites/agri.yaml
index 33b4ffe803..722dfea10e 100644
--- a/satpy/etc/composites/agri.yaml
+++ b/satpy/etc/composites/agri.yaml
@@ -15,22 +15,62 @@ composites:
modifiers: [sunz_corrected]
standard_name: toa_bidirectional_reflectance
- true_color:
- compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ green_nocorr:
+ compositor: !!python/name:satpy.composites.abi.SimulatedGreen
+ # FUTURE: Set a wavelength...see what happens. Dependency finding
+ # probably wouldn't work.
+ prerequisites:
+ # should we be using the most corrected or least corrected inputs?
+ - name: C01
+ - name: C02
+ - name: C03
+ standard_name: toa_bidirectional_reflectance
+
+ pseudored:
+ compositor: !!python/name:satpy.composites.agri.SimulatedRed
+ # FUTURE: Set a wavelength...see what happens. Dependency finding
+ # probably wouldn't work.
prerequisites:
+ # should we be using the most corrected or least corrected inputs?
- name: C02
modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: C03
+ modifiers: [sunz_corrected]
+ standard_name: toa_bidirectional_reflectance
+
+ pseudored_nocorr:
+ compositor: !!python/name:satpy.composites.agri.SimulatedRed
+ # FUTURE: Set a wavelength...see what happens. Dependency finding
+ # probably wouldn't work.
+ prerequisites:
+ # should we be using the most corrected or least corrected inputs?
+ - name: C02
+ - name: C03
+ standard_name: toa_bidirectional_reflectance
+
+ true_color:
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ prerequisites:
+ - name: pseudored
- name: green
- name: C01
modifiers: [sunz_corrected, rayleigh_corrected]
standard_name: true_color
+
+ true_color_nocorr:
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ prerequisites:
+ - name: pseudored_nocorr
+ - name: green_nocorr
+ - name: C01
+ standard_name: true_color
#
cloud_phase_distinction:
description: >
Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA
Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html)
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Day_Cloud_Phase_Distinction.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf
Cloud Type recipe and typical colours: https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_IL_18_05_13_A&RevisionSelectionMethod=LatestReleased&Rendition=Web
## it uses the default used in etc/enhancements/generic.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
@@ -56,7 +96,7 @@ composites:
description: >
Day Snow-Fog RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFog.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DaySnowFogRGB_final_v2.pdf
    ## it uses the snow_default enhancement from etc/enhancements/generic.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -74,7 +114,7 @@ composites:
description: >
Fire Temperature RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/Fire_Temperature_RGB.pdf
## adapted from etc/composites/viirs.yaml
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -88,7 +128,7 @@ composites:
description: >
Day Land Cloud Fire RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_DayLandCloudFireRGB_final.pdf
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- name: C06
@@ -102,7 +142,8 @@ composites:
description: >
Day Land Cloud RGB, for GOESR: NASA, NOAA
references:
- CIRA Quick Guide: http://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_GOESR_daylandcloudRGB_final.pdf
+
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- name: C05
diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml
index e733b00507..ad77eeab50 100644
--- a/satpy/etc/composites/ahi.yaml
+++ b/satpy/etc/composites/ahi.yaml
@@ -4,9 +4,9 @@ modifiers:
rayleigh_corrected:
modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
atmosphere: us-standard
- aerosol_type: marine_clean_aerosol
+ aerosol_type: rayleigh_only
prerequisites:
- - wavelength: 0.64
+ - name: B03
modifiers: [sunz_corrected]
optional_prerequisites:
- satellite_azimuth_angle
@@ -16,7 +16,7 @@ modifiers:
composites:
green:
- compositor: !!python/name:satpy.composites.ahi.GreenCorrector
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
# FUTURE: Set a wavelength...see what happens. Dependency finding
# probably wouldn't work.
prerequisites:
@@ -28,8 +28,22 @@ composites:
modifiers: [sunz_corrected]
standard_name: toa_bidirectional_reflectance
+ green_true_color_reproduction:
+ # JMA True Color Reproduction green band
+ # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
+ fractions: [0.6321, 0.2928, 0.0751]
+ prerequisites:
+ - name: B02
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: B03
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: B04
+ modifiers: [sunz_corrected]
+ standard_name: none
+
green_nocorr:
- compositor: !!python/name:satpy.composites.ahi.GreenCorrector
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
# FUTURE: Set a wavelength...see what happens. Dependency finding
# probably wouldn't work.
prerequisites:
@@ -155,13 +169,22 @@ composites:
standard_name: fire_temperature
name: fire_temperature_39refl
-
overview:
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- - 0.65
- - 0.85
- - 10.4
+ - name: B03
+ modifiers: [sunz_corrected]
+ - name: B04
+ modifiers: [sunz_corrected]
+ - name: B13
+ standard_name: overview
+
+ overview_raw:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - B03
+ - B04
+ - B13
standard_name: overview
natural_color:
@@ -205,6 +228,18 @@ composites:
high_resolution_band: red
standard_name: true_color
+ true_color_reproduction:
+ # JMA True Color Reproduction
+ # http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ prerequisites:
+ - name: B03
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: green_true_color_reproduction
+ - name: B01
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: true_color_reproduction
+
# true_color_reducedsize_land:
# compositor: !!python/name:satpy.composites.GenericCompositor
# prerequisites:
@@ -310,6 +345,15 @@ composites:
prerequisites:
- name: B14
+ natural_color_raw_with_night_ir:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: natural_color_with_night_ir
+ lim_low: 80
+ lim_high: 90
+ prerequisites:
+ - natural_color_raw
+ - cloudtop
+
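The lim_low/lim_high pair above controls where the DayNightCompositor hands over from the day composite to the night composite as a function of solar zenith angle. As a rough illustration only, assuming a simple linear ramp between the two limits (the exact weighting used by satpy may differ in detail):

    import numpy as np

    def day_weight(sza, lim_low=80.0, lim_high=90.0):
        # 1.0 on the day side, 0.0 on the night side, linear ramp between the limits
        return np.clip((lim_high - sza) / (lim_high - lim_low), 0.0, 1.0)

    sza = np.array([70.0, 85.0, 95.0])  # solar zenith angles in degrees (made up)
    print(day_weight(sza))              # -> [1.  0.5 0. ]
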
true_color_with_night_ir:
compositor: !!python/name:satpy.composites.DayNightCompositor
standard_name: true_color_with_night_ir
@@ -333,6 +377,14 @@ composites:
- name: B15
- name: B13
+ cloudtop:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ standard_name: cloudtop
+ prerequisites:
+ - name: B07
+ - name: B14
+ - name: B15
+
night_ir_with_background:
compositor: !!python/name:satpy.composites.BackgroundCompositor
standard_name: night_ir_with_background
@@ -342,7 +394,7 @@ composites:
night_ir_with_background_hires:
compositor: !!python/name:satpy.composites.BackgroundCompositor
- standard_name: night_ir_with_background_hires
+ standard_name: night_ir_with_background
prerequisites:
- night_ir_alpha
- _night_background_hires
diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml
index 634d5f1d78..0c2635d3c7 100644
--- a/satpy/etc/composites/ami.yaml
+++ b/satpy/etc/composites/ami.yaml
@@ -2,7 +2,7 @@ sensor_name: visir/ami
composites:
green_raw:
- compositor: !!python/name:satpy.composites.ahi.GreenCorrector
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
prerequisites:
- name: VI005
modifiers: [sunz_corrected]
@@ -12,7 +12,7 @@ composites:
fractions: [0.85, 0.15]
green:
- compositor: !!python/name:satpy.composites.ahi.GreenCorrector
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
prerequisites:
- name: VI005
modifiers: [sunz_corrected, rayleigh_corrected]
@@ -22,7 +22,7 @@ composites:
fractions: [0.85, 0.15]
green_nocorr:
- compositor: !!python/name:satpy.composites.ahi.GreenCorrector
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
prerequisites:
- name: VI005
- name: VI008
@@ -92,6 +92,15 @@ composites:
high_resolution_band: blue
standard_name: natural_color
+ natural_color_raw_with_night_ir:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: natural_color_with_night_ir
+ lim_low: 80
+ lim_high: 90
+ prerequisites:
+ - natural_color_raw
+ - cloudtop
+
day_microphysics_eum:
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml
index f93480b9bb..853fcff835 100644
--- a/satpy/etc/composites/fci.yaml
+++ b/satpy/etc/composites/fci.yaml
@@ -1 +1,63 @@
sensor_name: visir/fci
+
+
+composites:
+ corrected_green:
+ description: >
+    The FCI green band at 0.51 µm deliberately misses the chlorophyll band, so that
+    the signal comes from aerosols and ash rather than vegetation. As a result,
+    vegetation in a true colour RGB looks brown rather than green. Mixing in
+    some of the NIR 0.8 µm channel reduces this effect. Note that the fractions
+    currently implemented are experimental and may change in future versions of Satpy.
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
+ fractions: [0.93, 0.07]
+ prerequisites:
+ - name: vis_05
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: vis_08
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: toa_bidirectional_reflectance
+
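The fractions above are plain blending weights: the GreenCorrector output is a weighted sum of its prerequisites, here 93 % native green and 7 % NIR. A minimal sketch of that blend, with made-up reflectances:

    import numpy as np

    def blend_green(vis_05, vis_08, fractions=(0.93, 0.07)):
        # weighted sum: 0.93 * native green + 0.07 * NIR
        return fractions[0] * vis_05 + fractions[1] * vis_08

    vis_05 = np.array([30.0, 5.0])      # green band, dim over vegetation
    vis_08 = np.array([60.0, 40.0])     # NIR band, bright over vegetation
    print(blend_green(vis_05, vis_08))  # -> [32.1   7.45]
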
+ corrected_green_raw:
+ description: >
+ Alternative to corrected_green, but without solar zenith or rayleigh correction.
+ compositor: !!python/name:satpy.composites.spectral.GreenCorrector
+ fractions: [0.93, 0.07]
+ prerequisites:
+ - name: vis_05
+ - name: vis_08
+ standard_name: toa_bidirectional_reflectance
+
+ binary_cloud_mask:
+    # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and
+    # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM).
+ compositor: !!python/name:satpy.composites.CategoricalDataCompositor
+ prerequisites:
+ - name: 'cloud_state'
+ lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan]
+ standard_name: binary_cloud_mask
+
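The lut above is indexed by the integer category value of cloud_state: category 0 maps to nan, 1 to 0, 2 through 7 to 1, and so on. A minimal sketch of that lookup, with hypothetical category data:

    import numpy as np

    lut = np.array([np.nan, 0, 1, 1, 1, 1, 1, 1, 0, np.nan])
    cloud_state = np.array([[0, 2, 5],
                            [8, 9, 1]])  # made-up category values
    print(lut[cloud_state])              # -> [[nan  1.  1.] [ 0. nan  0.]]
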
+ true_color:
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ description: >
+ FCI true color composite. The green band is simulated based on a combination of
+ channels. This simulation may change in future versions of Satpy. See the description
+ of the corrected_green composites for details.
+ prerequisites:
+ - name: vis_06
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: corrected_green
+ - name: vis_04
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: true_color
+
+ true_color_raw_with_corrected_green:
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ description: >
+ FCI true color without solar zenith or rayleigh corrections, but with the
+ corrected green composite.
+ prerequisites:
+ - name: vis_06
+ - name: corrected_green_raw
+ - name: vis_04
+ standard_name: true_color_raw
diff --git a/satpy/etc/composites/ghi.yaml b/satpy/etc/composites/ghi.yaml
new file mode 100644
index 0000000000..a9b3ddb720
--- /dev/null
+++ b/satpy/etc/composites/ghi.yaml
@@ -0,0 +1,21 @@
+sensor_name: visir/ghi
+
+composites:
+ true_color:
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ prerequisites:
+ - name: C04
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: C03
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - name: C02
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: true_color
+
+ true_color_nocorr:
+ compositor: !!python/name:satpy.composites.SelfSharpenedRGB
+ prerequisites:
+ - name: C04
+ - name: C03
+ - name: C02
+ standard_name: true_color
diff --git a/satpy/etc/composites/mhs.yaml b/satpy/etc/composites/mhs.yaml
new file mode 100644
index 0000000000..245991f53e
--- /dev/null
+++ b/satpy/etc/composites/mhs.yaml
@@ -0,0 +1,18 @@
+sensor_name: mhs
+
+composites:
+ mw183_humidity:
+ compositor: !!python/name:satpy.composites.RGBCompositor
+ prerequisites:
+ - name: '3'
+ - name: '4'
+ - name: '5'
+ standard_name: mw183_humidity
+
+ mw183_humidity_surface:
+ compositor: !!python/name:satpy.composites.RGBCompositor
+ prerequisites:
+ - name: '1'
+ - name: '2'
+ - name: '3'
+ standard_name: mw183_humidity_surface
diff --git a/satpy/etc/composites/modis.yaml b/satpy/etc/composites/modis.yaml
index 27524d81ee..c011731c58 100644
--- a/satpy/etc/composites/modis.yaml
+++ b/satpy/etc/composites/modis.yaml
@@ -4,6 +4,7 @@ modifiers:
modifier: !!python/name:satpy.modifiers.atmosphere.ReflectanceCorrector
url: "https://www.ssec.wisc.edu/~davidh/polar2grid/modis_crefl/tbase.hdf"
known_hash: "sha256:ed5183cddce905361c1cac8ae6e3a447212875ea421a05747751efe76f8a068e"
+ dem_sds: "Elevation"
prerequisites:
- name: satellite_azimuth_angle
- name: satellite_zenith_angle
diff --git a/satpy/etc/composites/msi.yaml b/satpy/etc/composites/msi.yaml
index b34ff4891d..010bd240b0 100644
--- a/satpy/etc/composites/msi.yaml
+++ b/satpy/etc/composites/msi.yaml
@@ -79,7 +79,7 @@ composites:
- name: 'B04'
modifiers: [effective_solar_pathlength_corrected]
standard_name: natural_color
-
+
true_color:
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
diff --git a/satpy/etc/composites/msu_gsa.yaml b/satpy/etc/composites/msu_gsa.yaml
new file mode 100644
index 0000000000..0ab6840af4
--- /dev/null
+++ b/satpy/etc/composites/msu_gsa.yaml
@@ -0,0 +1,77 @@
+sensor_name: visir/msu_gsa
+
+composites:
+ overview_raw:
+ compositor: !!python/name:satpy.composites.RGBCompositor
+ prerequisites:
+ - name: C01
+ - name: C03
+ - name: C09
+ standard_name: overview
+ overview:
+ compositor: !!python/name:satpy.composites.RGBCompositor
+ prerequisites:
+ - name: C01
+ modifiers: [sunz_corrected]
+ - name: C03
+ modifiers: [sunz_corrected]
+ - name: C09
+ standard_name: overview
+ msugsa_color:
+ compositor: !!python/name:satpy.composites.RGBCompositor
+ prerequisites:
+ - name: C03
+ modifiers: [sunz_corrected]
+ - name: C02
+ modifiers: [sunz_corrected]
+ - name: C01
+ modifiers: [sunz_corrected]
+ standard_name: natural_color
+ msugsa_color_raw:
+ compositor: !!python/name:satpy.composites.RGBCompositor
+ prerequisites:
+ - name: C03
+ - name: C02
+ - name: C01
+ standard_name: natural_color
+
+ night_ir_alpha:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ standard_name: night_ir_alpha
+ prerequisites:
+ - 3.8
+ - 10.8
+ - 11.9
+ - 10.8
+
+ day_color_with_night_ir:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: day_color_with_night_ir
+ lim_low: 90.0
+ lim_high: 100.0
+ prerequisites:
+ - msugsa_color_raw
+ - night_ir_with_background
+
+ day_color_with_night_ir_hires:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: day_color_with_night_ir
+ lim_low: 90.0
+ lim_high: 100.0
+ prerequisites:
+ - msugsa_color_raw
+ - night_ir_with_background_hires
+
+ night_ir_with_background:
+ compositor: !!python/name:satpy.composites.BackgroundCompositor
+ standard_name: night_ir_with_background
+ prerequisites:
+ - night_ir_alpha
+ - _night_background
+
+ night_ir_with_background_hires:
+ compositor: !!python/name:satpy.composites.BackgroundCompositor
+ standard_name: night_ir_with_background_hires
+ prerequisites:
+ - night_ir_alpha
+ - _night_background_hires
diff --git a/satpy/etc/composites/scatterometer.yaml b/satpy/etc/composites/scatterometer.yaml
index 02f6ceb0ff..30d1733f4a 100644
--- a/satpy/etc/composites/scatterometer.yaml
+++ b/satpy/etc/composites/scatterometer.yaml
@@ -12,4 +12,3 @@ composites:
prerequisites:
- name: surface_soil_moisture
standard_name: soil_moisture
-
diff --git a/satpy/etc/composites/seviri.yaml b/satpy/etc/composites/seviri.yaml
index 999192cb9a..69b2a4b6ee 100644
--- a/satpy/etc/composites/seviri.yaml
+++ b/satpy/etc/composites/seviri.yaml
@@ -11,6 +11,19 @@ modifiers:
- IR_108
- IR_134
+ rayleigh_corrected:
+ modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
+ atmosphere: us-standard
+ aerosol_type: rayleigh_only
+ prerequisites:
+ - name: VIS006
+ modifiers: [sunz_corrected]
+ optional_prerequisites:
+ - satellite_azimuth_angle
+ - satellite_zenith_angle
+ - solar_azimuth_angle
+ - solar_zenith_angle
+
composites:
ct_masked_ir:
@@ -487,6 +500,15 @@ composites:
- natural_color
- night_ir_with_background
+ natural_color_raw_with_night_ir:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: natural_color_with_night_ir
+ lim_low: 80
+ lim_high: 90
+ prerequisites:
+ - natural_color
+ - cloudtop
+
natural_color_with_night_ir_hires:
compositor: !!python/name:satpy.composites.DayNightCompositor
standard_name: natural_color_with_night_ir_hires
@@ -494,6 +516,20 @@ composites:
- natural_color
- night_ir_with_background_hires
+ natural_enh_with_night_ir:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: natural_color_with_night_ir
+ prerequisites:
+ - natural_enh
+ - night_ir_with_background
+
+ natural_enh_with_night_ir_hires:
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ standard_name: natural_color_with_night_ir_hires
+ prerequisites:
+ - natural_enh
+ - night_ir_with_background_hires
+
night_ir_alpha:
compositor: !!python/name:satpy.composites.GenericCompositor
standard_name: night_ir_alpha
@@ -512,7 +548,7 @@ composites:
night_ir_with_background_hires:
compositor: !!python/name:satpy.composites.BackgroundCompositor
- standard_name: night_ir_with_background_hires
+ standard_name: night_ir_with_background
prerequisites:
- night_ir_alpha
- _night_background_hires
diff --git a/satpy/etc/composites/tropomi.yaml b/satpy/etc/composites/tropomi.yaml
index c52c0aa430..e7037de0c5 100644
--- a/satpy/etc/composites/tropomi.yaml
+++ b/satpy/etc/composites/tropomi.yaml
@@ -13,4 +13,3 @@ composites:
prerequisites:
- nitrogendioxide_tropospheric_column
standard_name: no2_tropospheric_polluted
-
diff --git a/satpy/etc/composites/vii.yaml b/satpy/etc/composites/vii.yaml
new file mode 100644
index 0000000000..4e150f2be4
--- /dev/null
+++ b/satpy/etc/composites/vii.yaml
@@ -0,0 +1,121 @@
+sensor_name: visir/vii
+modifiers:
+ nir_reflectance:
+ modifier: !!python/name:satpy.modifiers.NIRReflectance
+ prerequisites:
+ - name: 'vii_10690'
+ optional_prerequisites:
+ - solar_zenith
+ - name: 'vii_13345'
+ sunz_threshold: 85.0
+
+ rayleigh_corrected:
+ modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance
+ atmosphere: us-standard
+ aerosol_type: rayleigh_only
+ prerequisites:
+ - wavelength: 0.67
+ modifiers: [ sunz_corrected ]
+ optional_prerequisites:
+ - observation_azimuth
+ - observation_zenith
+ - solar_azimuth
+ - solar_zenith
+
+composites:
+ sepia:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: 'vii_443'
+ modifiers: [sunz_corrected]
+ - name: 'vii_555'
+ modifiers: [sunz_corrected]
+ - name: 'vii_668'
+ modifiers: [sunz_corrected]
+ standard_name: sepia
+
+ true_color_uncorrected:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: 'vii_668'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_555'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_443'
+ modifiers: [ sunz_corrected ]
+ standard_name: true_color
+
+ true_color:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: 'vii_668'
+ modifiers: [ sunz_corrected, rayleigh_corrected ]
+ - name: 'vii_555'
+ modifiers: [ sunz_corrected, rayleigh_corrected ]
+ - name: 'vii_443'
+ modifiers: [ sunz_corrected, rayleigh_corrected ]
+ standard_name: true_color
+
+ natural_color:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: 'vii_1630'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_865'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_668'
+ modifiers: [ sunz_corrected, rayleigh_corrected ]
+ standard_name: natural_color
+
+ day_microphysics:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: 'vii_865'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_3740'
+ modifiers: [ nir_reflectance ]
+ - name: 'vii_10690'
+ standard_name: day_microphysics
+
+ snow:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: 'vii_865'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_1630'
+ modifiers: [ sunz_corrected ]
+ - name: 'vii_3740'
+ modifiers: [ nir_reflectance ]
+ standard_name: snow
+
+ convection:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - compositor: !!python/name:satpy.composites.DifferenceCompositor
+ prerequisites:
+ - name: 'vii_6725'
+ - name: 'vii_7325'
+ - compositor: !!python/name:satpy.composites.DifferenceCompositor
+ prerequisites:
+ - name: 'vii_3740'
+ modifiers: [ co2_corrected ]
+ - name: 'vii_10690'
+ - compositor: !!python/name:satpy.composites.DifferenceCompositor
+ prerequisites:
+ - name: 'vii_1630'
+ - name: 'vii_668'
+ standard_name: convection
+
+ dust:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - compositor: !!python/name:satpy.composites.DifferenceCompositor
+ prerequisites:
+ - name: 'vii_12020'
+ - name: 'vii_10690'
+ - compositor: !!python/name:satpy.composites.DifferenceCompositor
+ prerequisites:
+ - name: 'vii_10690'
+ - name: 'vii_8540'
+ - name: 'vii_10690'
+ standard_name: dust
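Each nested DifferenceCompositor above produces one channel of the RGB by subtracting its second prerequisite from its first. A minimal sketch of the dust recipe, with made-up brightness temperatures:

    import numpy as np

    bt_12020 = np.array([260.0])  # ~12.0 um brightness temperature, K (made up)
    bt_10690 = np.array([262.0])  # ~10.7 um
    bt_08540 = np.array([258.0])  # ~8.5 um

    red = bt_12020 - bt_10690     # split-window difference
    green = bt_10690 - bt_08540
    blue = bt_10690               # plain window channel
    print(red, green, blue)       # -> [-2.] [4.] [262.]
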
diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml
index 2862bc027f..050df02cb5 100644
--- a/satpy/etc/composites/viirs.yaml
+++ b/satpy/etc/composites/viirs.yaml
@@ -162,6 +162,21 @@ composites:
standard_name: true_color
high_resolution_band: red
+ true_color_crefl:
+ compositor: !!python/name:satpy.composites.RatioSharpenedRGB
+ prerequisites:
+ - name: M05
+ modifiers: [sunz_corrected, rayleigh_corrected_crefl]
+ - name: M04
+ modifiers: [sunz_corrected, rayleigh_corrected_crefl]
+ - name: M03
+ modifiers: [sunz_corrected, rayleigh_corrected_crefl]
+ optional_prerequisites:
+ - name: I01
+ modifiers: [sunz_corrected_iband, rayleigh_corrected_crefl_iband]
+ standard_name: true_color
+ high_resolution_band: red
+
true_color_lowres:
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
@@ -532,3 +547,81 @@ composites:
modifiers: [sunz_corrected_iband, rayleigh_corrected_iband]
standard_name: ocean_color
high_resolution_band: red
+
+ cloud_phase_distinction:
+ description: >
+ Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA
+ Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html)
+ references:
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf
+ Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659
+ ## it uses the default used in etc/enhancements/generic.yaml
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: M15
+ - name: I01
+ modifiers: [sunz_corrected_iband, rayleigh_corrected]
+ - name: I03
+ modifiers: [sunz_corrected_iband]
+ standard_name: cloud_phase_distinction
+
+ cloud_phase_distinction_raw:
+ description: >
+ same as cloud_phase_distinction
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: M15
+ - name: I01
+ - name: I03
+ standard_name: cloud_phase_distinction
+
+ cloud_phase:
+ description: >
+ EUMETSAT Cloud Phase RGB product
+ references:
+ EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf
+      Recipe: http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: I03
+ modifiers: [sunz_corrected_iband]
+ - name: M11
+ modifiers: [sunz_corrected]
+ - name: M05
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: cloud_phase
+
+ cloud_phase_raw:
+ description: >
+ same as cloud_phase RGB product, without modifiers
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: I03
+ - name: M11
+ - name: M05
+ standard_name: cloud_phase
+
+ cimss_cloud_type:
+ description: >
+ Cloud Type RGB, candidate for standard FCI RGB
+ references:
+ EUMETRAIN Quick Guide: http://eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: M09
+ modifiers: [sunz_corrected]
+ - name: I01
+ modifiers: [sunz_corrected_iband]
+ - name: I03
+ modifiers: [sunz_corrected_iband]
+ standard_name: cimss_cloud_type
+
+ cimss_cloud_type_raw:
+ description: >
+ Cloud Type RGB, candidate for standard FCI RGB. Raw version without sun zenith correction.
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - name: M09
+ - name: I01
+ - name: I03
+ standard_name: cimss_cloud_type
diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml
index b00cfe8dac..5c1866e1e0 100644
--- a/satpy/etc/composites/visir.yaml
+++ b/satpy/etc/composites/visir.yaml
@@ -211,9 +211,9 @@ composites:
natural_color_raw:
compositor: !!python/name:satpy.composites.GenericCompositor
prerequisites:
- - 1.63
- - 0.85
- - 0.635
+ - wavelength: 1.63
+ - wavelength: 0.85
+ - wavelength: 0.635
standard_name: natural_color
natural_color:
@@ -337,43 +337,43 @@ composites:
cloud_top_phase:
compositor: !!python/name:satpy.composites.PaletteCompositor
prerequisites:
- - cpp_phase
- - cpp_phase_pal
+ - cmic_phase
+ - cmic_phase_pal
standard_name: cloud_top_phase
cloud_drop_effective_radius:
compositor: !!python/name:satpy.composites.ColorizeCompositor
prerequisites:
- - cpp_reff
- - cpp_reff_pal
+ - cmic_reff
+ - cmic_reff_pal
standard_name: cloud_drop_effective_radius
cloud_optical_thickness:
compositor: !!python/name:satpy.composites.ColorizeCompositor
prerequisites:
- - cpp_cot
- - cpp_cot_pal
+ - cmic_cot
+ - cmic_cot_pal
standard_name: cloud_optical_thickness
cloud_water_path:
compositor: !!python/name:satpy.composites.ColorizeCompositor
prerequisites:
- - cpp_cwp
- - cpp_cwp_pal
+ - cmic_cwp
+ - cmic_cwp_pal
standard_name: cloud_water_path
ice_water_path:
compositor: !!python/name:satpy.composites.ColorizeCompositor
prerequisites:
- - cpp_iwp
- - cpp_iwp_pal
+ - cmic_iwp
+ - cmic_iwp_pal
standard_name: ice_water_path
liquid_water_path:
compositor: !!python/name:satpy.composites.ColorizeCompositor
prerequisites:
- - cpp_lwp
- - cpp_lwp_pal
+ - cmic_lwp
+ - cmic_lwp_pal
standard_name: liquid_water_path
night_microphysics:
@@ -419,11 +419,133 @@ composites:
_night_background:
compositor: !!python/name:satpy.composites.StaticImageCompositor
standard_name: night_background
- url: "https://neo.sci.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_01deg_geo.tif"
+ url: "https://neo.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_01deg_geo.tif"
known_hash: "sha256:146c116962677ae113d9233374715686737ff97141a77cc5da69a9451315a685" # optional
_night_background_hires:
compositor: !!python/name:satpy.composites.StaticImageCompositor
- standard_name: night_background_hires
- url: "https://neo.sci.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_3km_geo.tif"
+ standard_name: night_background
+ url: "https://neo.gsfc.nasa.gov/archive/blackmarble/2016/global/BlackMarble_2016_3km_geo.tif"
known_hash: "sha256:e915ef2a20d84e2a59e1547d3ad564463ad4bcf22bfa02e0e0b8ed1cd722e9c0" # optional
+
+ cloud_phase_distinction:
+ description: >
+ Day Cloud Phase Distinction RGB, for GOESR: NASA, NOAA
+ Cloud Type RGB, for EUMETSAT (https://www.eumetsat.int/website/home/Images/ImageLibrary/DAT_3958037.html)
+ references:
+ CIRA Quick Guide: https://rammb.cira.colostate.edu/training/visit/quick_guides/QuickGuide_DayCloudPhaseDistinction_final_v2.pdf
+ Cloud Type recipe and typical colours: https://www.eumetsat.int/fr/media/45659
+ ## it uses the default used in etc/enhancements/generic.yaml
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - wavelength: 10.3
+ - wavelength: 0.64
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ - wavelength: 1.6
+ modifiers: [sunz_corrected]
+ standard_name: cloud_phase_distinction
+
+ cloud_phase_distinction_raw:
+ description: >
+ same as cloud_phase_distinction
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - wavelength: 10.3
+ - wavelength: 0.64
+ - wavelength: 1.6
+ standard_name: cloud_phase_distinction
+
+ cloud_phase:
+ description: >
+ EUMETSAT Cloud Phase RGB product
+ references:
+ EUMETRAIN Quick Guide: http://www.eumetrain.org/rgb_quick_guides/quick_guides/CloudPhaseRGB.pdf
+      Recipe: http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - wavelength: 1.6
+ modifiers: [sunz_corrected]
+ - wavelength: 2.25
+ modifiers: [sunz_corrected]
+ - wavelength: 0.67
+ modifiers: [sunz_corrected, rayleigh_corrected]
+ standard_name: cloud_phase
+
+ cloud_phase_raw:
+ description: >
+ same as cloud_phase RGB product, without modifiers
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - wavelength: 1.6
+ - wavelength: 2.25
+ - wavelength: 0.67
+ standard_name: cloud_phase
+
+ cimss_cloud_type:
+ description: >
+ Cloud Type RGB, candidate for standard FCI RGB
+ references:
+ EUMETRAIN Quick Guide: http://eumetrain.org/rgb_quick_guides/quick_guides/CloudTypeRGB.pdf
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - wavelength: 1.38
+ modifiers: [sunz_corrected]
+ - wavelength: 0.64
+ modifiers: [sunz_corrected]
+ - wavelength: 1.61
+ modifiers: [sunz_corrected]
+ standard_name: cimss_cloud_type
+
+ cimss_cloud_type_raw:
+ description: >
+ Cloud Type RGB, candidate for standard FCI RGB. Raw version without sun zenith correction.
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - wavelength: 1.38
+ - wavelength: 0.64
+ - wavelength: 1.61
+ standard_name: cimss_cloud_type
+
+ essl_low_level_moisture:
+ description: >
+ Greyscale low level moisture using the ratio between the
+ 0.91 µm and the 0.86 µm channels. Developed by the
+ European Severe Storms Laboratory (ESSL). For a color version,
+ see essl_colorized_low_level_moisture.
+ compositor: !!python/name:satpy.composites.RatioCompositor
+ prerequisites:
+ - wavelength: 0.905
+ - wavelength: 0.86
+ standard_name: essl_low_level_moisture
+
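The RatioCompositor divides the first prerequisite by the second, so the composite above is reflectance(0.905 µm) / reflectance(0.86 µm); since the 0.91 µm band absorbs water vapour, moister low-level air lowers the ratio. A minimal sketch with made-up reflectances:

    import numpy as np

    r_0905 = np.array([0.30, 0.42])  # water-vapour absorbing band
    r_0860 = np.array([0.50, 0.45])  # nearby window band
    print(r_0905 / r_0860)           # -> [0.6        0.93333333]
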
+ day_essl_low_level_moisture:
+ description: >
+ Daytime only version of essl_low_level_moisture.
+ Nighttime part of the scene will be masked out.
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ day_night: day_only
+ prerequisites:
+ - name: essl_low_level_moisture
+ standard_name: day_essl_low_level_moisture
+
+ essl_colorized_low_level_moisture:
+ description: >
+ Colorized low level moisture using the ratio between the
+ 0.91 µm and the 0.86 µm channels. Developed by the
+ European Severe Storms Laboratory (ESSL). The colorization
+ is still under development and may be subject to change.
+ compositor: !!python/name:satpy.composites.RatioCompositor
+ prerequisites:
+ - wavelength: 0.86
+ - wavelength: 0.905
+ standard_name: essl_colorized_low_level_moisture
+
+ day_essl_colorized_low_level_moisture:
+ description: >
+ Daytime only version of essl_colorized_low_level_moisture.
+ Nighttime part of the scene will be masked out.
+ compositor: !!python/name:satpy.composites.DayNightCompositor
+ day_night: day_only
+ prerequisites:
+ - name: essl_colorized_low_level_moisture
+ standard_name: day_essl_colorized_low_level_moisture
diff --git a/satpy/etc/enhancements/abi.yaml b/satpy/etc/enhancements/abi.yaml
index 5260d8e9d7..2bc039c1f3 100644
--- a/satpy/etc/enhancements/abi.yaml
+++ b/satpy/etc/enhancements/abi.yaml
@@ -32,6 +32,19 @@ enhancements:
min_stretch: [-26.2, -43.2, 243.9]
max_stretch: [0.6, 6.7, 208.5]
+
+ true_color_with_night_fires:
+ standard_name: true_color_with_night_fires
+ sensor: abi
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: [0., 0., 0.]
+ max_stretch: [1., 1., 1.]
+
+
cira_fire_temperature:
standard_name: cira_fire_temperature
operations:
@@ -155,3 +168,70 @@ enhancements:
threshold: 242.0
min_in: 163.0
max_in: 330.0
+
+  # EUMETSAT cloud phase and cloud type RGB recipes
+ # http://eumetrain.org/RGBguide/recipes/RGB_recipes.pdf
+ cloud_phase:
+ standard_name: cloud_phase
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: [ 0, 0, 0]
+ max_stretch: [50, 50, 100]
+
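A crude stretch maps each channel linearly from [min_stretch, max_stretch] onto [0, 1], so the red and green difference inputs above saturate at 50 while blue saturates at 100. A minimal sketch (clipping to [0, 1] is assumed here for illustration):

    import numpy as np

    def crude_stretch(data, min_stretch, max_stretch):
        # linear per-channel scaling of [min, max] onto [0, 1]
        scaled = (data - min_stretch) / (max_stretch - min_stretch)
        return np.clip(scaled, 0.0, 1.0)

    rgb = np.array([25.0, 40.0, 80.0])  # made-up R, G, B inputs
    print(crude_stretch(rgb, np.zeros(3), np.array([50.0, 50.0, 100.0])))
    # -> [0.5 0.8 0.8]
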
+ # NOAA GOES-R Level-2 ABI Cloud Mask product
+ # https://www.goes-r.gov/products/baseline-clear-sky-mask.html
+ binary_cloud_mask:
+ name: BCM
+ operations:
+ - name: palettize
+ method: !!python/name:satpy.enhancements.palettize
+ kwargs:
+ palettes:
+ - {'values': [ 0, # Clear
+ 1, # Cloudy
+ 255, # Fill Value
+ ],
+ 'colors': [[ 94, 79, 162], # blue, 0 = Clear
+ [255, 255, 255], # white, 1 = Cloudy
+ [ 0, 0, 0], # black, 255 = Fill Value
+ ],
+ 'color_scale': 255,
+ }
+
+ four_level_cloud_mask:
+ name: ACM
+ operations:
+ - name: palettize
+ method: !!python/name:satpy.enhancements.palettize
+ kwargs:
+ palettes:
+ - {'values': [ 0, # Clear
+ 1, # Probably Clear
+ 2, # Probably Cloudy
+ 3, # Cloudy
+ 255, # Fill Value
+ ],
+ 'colors': [[ 94, 79, 162], # blue, 0 = Clear
+ [ 73, 228, 242], # cyan, 1 = Probably Clear
+ [158, 1, 66], # red, 2 = Probably Cloudy
+ [255, 255, 255], # white, 3 = Cloudy
+ [ 0, 0, 0], # black, 255 = Fill Value
+ ],
+ 'color_scale': 255,
+ }
+
+ cloud_probability:
+ name: Cloud_Probabilities
+ operations:
+ - name: colorize
+ method: !!python/name:satpy.enhancements.colorize
+ kwargs:
+ palettes:
+ - {colors: 'spectral',
+ reverse: true,
+ min_value: 0.0,
+ max_value: 1.0,
+ }
diff --git a/satpy/etc/enhancements/ahi.yaml b/satpy/etc/enhancements/ahi.yaml
index 8951eaf7cd..ffd96a45e0 100644
--- a/satpy/etc/enhancements/ahi.yaml
+++ b/satpy/etc/enhancements/ahi.yaml
@@ -9,3 +9,17 @@ enhancements:
stretch: crude
min_stretch: [-26.2, -43.2, 243.9]
max_stretch: [0.6, 6.7, 208.5]
+
+ true_color_reproduction:
+ standard_name: true_color_reproduction
+ operations:
+ - name: color
+ method: !!python/name:satpy.enhancements.ahi.jma_true_color_reproduction
+ - name: cira_stretch
+ method: !!python/name:satpy.enhancements.cira_stretch
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: [0.08, 0.08, 0.08]
+ max_stretch: [.93, .90, .90]
diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml
index 73de008986..2688ee6345 100644
--- a/satpy/etc/enhancements/generic.yaml
+++ b/satpy/etc/enhancements/generic.yaml
@@ -917,3 +917,61 @@ enhancements:
stretch: crude
min_stretch: [26.2, 27.4, 243.9]
max_stretch: [ 0.6, -26.2, 208.5]
+  # SEADAS Chlorophyll A - MODIS or VIIRS
+ chlor_a_default:
+ name: chlor_a
+ reader: seadas_l2
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: log
+ base: "10"
+ factor: 21.0
+ min_stretch: 0.0
+ max_stretch: 20.0
+
+ cimss_cloud_type:
+ standard_name: cimss_cloud_type
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: [0.0, 0.0, 0.0]
+ max_stretch: [10.0, 80.0, 80.0]
+ - name: gamma
+ method: !!python/name:satpy.enhancements.gamma
+ kwargs:
+ gamma: [1.5, 0.75, 1.0]
+
+ cloud_phase:
+ standard_name: cloud_phase
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: [ 0, 0, 0]
+ max_stretch: [50, 50, 100]
+
+ essl_low_level_moisture:
+ name: essl_low_level_moisture
+ operations:
+ - name: linear_stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: 'crude', min_stretch: 0.35, max_stretch: 0.85}
+
+ day_essl_low_level_moisture:
+ standard_name: day_essl_low_level_moisture
+ operations: []
+
+ essl_colorized_low_level_moisture:
+ name: essl_colorized_low_level_moisture
+ operations:
+ - name: essl_moisture
+ method: !!python/name:satpy.enhancements.atmosphere.essl_moisture
+
+ day_essl_colorized_low_level_moisture:
+ standard_name: day_essl_colorized_low_level_moisture
+ operations: []
diff --git a/satpy/etc/enhancements/mhs.yaml b/satpy/etc/enhancements/mhs.yaml
new file mode 100644
index 0000000000..c997a11350
--- /dev/null
+++ b/satpy/etc/enhancements/mhs.yaml
@@ -0,0 +1,29 @@
+enhancements:
+
+ mw183_humidity:
+ standard_name: mw183_humidity
+ operations:
+ - name: inverse
+ method: !!python/name:satpy.enhancements.invert
+ args:
+ - [true, true, true]
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: linear}
+ - name: gamma
+ method: !!python/name:satpy.enhancements.gamma
+ kwargs: {gamma: 1.2}
+
+ mw183_humidity_surface:
+ standard_name: mw183_humidity_surface
+ operations:
+ - name: inverse
+ method: !!python/name:satpy.enhancements.invert
+ args:
+ - [true, true, true]
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: linear}
+ - name: gamma
+ method: !!python/name:satpy.enhancements.gamma
+ kwargs: {gamma: 1.2}
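The operation chain above inverts each channel (so moist, warm scenes appear dark), linear-stretches, then applies a gamma. A minimal sketch of the invert and gamma steps, assuming the common convention out = in ** (1 / gamma) on data scaled to [0, 1]:

    import numpy as np

    def invert_then_gamma(channel, gamma=1.2):
        # invert a [0, 1] channel, then brighten slightly with a gamma curve
        inverted = 1.0 - channel
        return inverted ** (1.0 / gamma)

    print(invert_then_gamma(np.array([0.0, 0.5, 1.0])))
    # -> roughly [1.0, 0.561, 0.0]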
diff --git a/satpy/etc/enhancements/mimic.yaml b/satpy/etc/enhancements/mimic.yaml
index 3314f0d263..81f4091356 100644
--- a/satpy/etc/enhancements/mimic.yaml
+++ b/satpy/etc/enhancements/mimic.yaml
@@ -96,5 +96,3 @@ enhancements:
],
min_value: 0,
max_value: 8 }
-
-
diff --git a/satpy/etc/enhancements/scatterometer.yaml b/satpy/etc/enhancements/scatterometer.yaml
index 37315b8163..305c0bfba6 100644
--- a/satpy/etc/enhancements/scatterometer.yaml
+++ b/satpy/etc/enhancements/scatterometer.yaml
@@ -17,5 +17,3 @@ enhancements:
kwargs:
palettes:
- {colors: rdbu, min_value: 0, max_value: 100}
-
-
diff --git a/satpy/etc/eps_avhrrl1b_6.5.xml b/satpy/etc/eps_avhrrl1b_6.5.xml
index 22b95db353..469a3e9705 100644
--- a/satpy/etc/eps_avhrrl1b_6.5.xml
+++ b/satpy/etc/eps_avhrrl1b_6.5.xml
@@ -7,20 +7,20 @@
File: eps_avhrrl1b_6.5.xml
Copyright (c) 2004, 2005 Eumetsat
-
- This file is part of the EPSXML format specification generated
+
+ This file is part of the EPSXML format specification generated
automatically using pfs2xml.
-
+
This XML description is distributed under the GPL license; you can
redistribute it and/or modify it under the terms of the GNU General
Public License as published by the Free Software Foundation; either
version 2 of the License, or (at your option) any later version.
-
- This XML description is distributed in the hope that it will be
+
+ This XML description is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
-
+
You should have received a copy of the GNU General Public License
along with the pfs2xml package; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
diff --git a/satpy/etc/readers/abi_l1b.yaml b/satpy/etc/readers/abi_l1b.yaml
index 5bc4913e50..d9de341ff1 100644
--- a/satpy/etc/readers/abi_l1b.yaml
+++ b/satpy/etc/readers/abi_l1b.yaml
@@ -6,12 +6,14 @@
reader:
name: abi_l1b
short_name: ABI L1b
- long_name: GOES-R ABI Level 1b
+ long_name: GOES-R ABI imager Level 1b data in netcdf format
description: >
GOES-R ABI Level 1b data reader in the NetCDF4 format. The file format is
described in the GOES-R Product Definition and Users' Guide (PUG). Volume
4 of this document can be found
`here `_.
+ status: Nominal
+ supports_fsspec: true
sensors: [abi]
default_channels:
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -102,6 +104,9 @@ datasets:
reflectance:
standard_name: toa_bidirectional_reflectance
units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c01
C02:
@@ -115,6 +120,9 @@ datasets:
reflectance:
standard_name: toa_bidirectional_reflectance
units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c02
C03:
@@ -128,6 +136,9 @@ datasets:
reflectance:
standard_name: toa_bidirectional_reflectance
units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c03
C04:
@@ -141,6 +152,9 @@ datasets:
reflectance:
standard_name: toa_bidirectional_reflectance
units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c04
C05:
@@ -154,6 +168,9 @@ datasets:
reflectance:
standard_name: toa_bidirectional_reflectance
units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c05
C06:
@@ -167,6 +184,9 @@ datasets:
reflectance:
standard_name: toa_bidirectional_reflectance
units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c06
C07:
@@ -180,6 +200,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c07
C08:
@@ -193,6 +216,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c08
C09:
@@ -206,6 +232,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c09
C10:
@@ -219,6 +248,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c10
C11:
@@ -232,6 +264,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c11
C12:
@@ -245,6 +280,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c12
C13:
@@ -258,6 +296,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c13
C14:
@@ -271,6 +312,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c14
C15:
@@ -284,6 +328,9 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c15
C16:
@@ -297,4 +344,7 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: K
+ counts:
+ standard_name: counts
+ units: "1"
file_type: c16
diff --git a/satpy/etc/readers/abi_l1b_scmi.yaml b/satpy/etc/readers/abi_l1b_scmi.yaml
index 25e07a503c..c5f281b63b 100644
--- a/satpy/etc/readers/abi_l1b_scmi.yaml
+++ b/satpy/etc/readers/abi_l1b_scmi.yaml
@@ -1,6 +1,10 @@
reader:
- description: SCMI NetCDF4 Reader for ABI data
name: abi_l1b_scmi
+ short_name: ABI level 1b
+ long_name: SCMI ABI L1B in netCDF4 format
+ description: SCMI NetCDF4 Reader for ABI data
+ status: Beta
+ supports_fsspec: false
sensors: []
default_channels:
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml
index a9b4b86a0d..9bf69a1b7f 100644
--- a/satpy/etc/readers/abi_l2_nc.yaml
+++ b/satpy/etc/readers/abi_l2_nc.yaml
@@ -1,12 +1,14 @@
reader:
name: abi_l2_nc
short_name: ABI L2 NetCDF4
- long_name: GOES-R ABI Level 2 NetCDF4
+ long_name: GOES-R ABI Level 2 products in netCDF4 format
description: >
GOES-R ABI Level 2+ data reader in the NetCDF4 format. The file format is
described in the GOES-R Product Definition and Users' Guide (PUG) Volume
5. This document can be found
`here `_.
+ status: Beta
+ supports_fsspec: true
sensors: ['abi']
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
# file pattern keys to sort files by with 'satpy.utils.group_files'
@@ -18,113 +20,113 @@ datasets:
name: C01
wavelength: [0.450, 0.470, 0.490]
calibration: reflectance
- file_type: abi_l2_cmip_c01
file_key: CMI
+ file_type: [abi_l2_cmip_c01, abi_l2_mcmip]
CMIP_C02: # Cloud Moisture Image Products Channel 2
name: C02
wavelength: [0.590, 0.640, 0.690]
calibration: reflectance
- file_type: abi_l2_cmip_c02
file_key: CMI
+ file_type: [abi_l2_cmip_c02, abi_l2_mcmip]
CMIP_C03: # Cloud Moisture Image Products Channel 3
name: C03
wavelength: [0.8455, 0.865, 0.8845]
calibration: reflectance
- file_type: abi_l2_cmip_c03
file_key: CMI
+ file_type: [abi_l2_cmip_c03, abi_l2_mcmip]
CMIP_C04: # Cloud Moisture Image Products Channel 4
name: C04
wavelength: [1.3705, 1.378, 1.3855]
calibration: reflectance
- file_type: abi_l2_cmip_c04
file_key: CMI
+ file_type: [abi_l2_cmip_c04, abi_l2_mcmip]
CMIP_C05: # Cloud Moisture Image Products Channel 5
name: C05
wavelength: [1.580, 1.610, 1.640]
calibration: reflectance
- file_type: abi_l2_cmip_c05
file_key: CMI
+ file_type: [abi_l2_cmip_c05, abi_l2_mcmip]
CMIP_C06: # Cloud Moisture Image Products Channel 6
name: C06
wavelength: [2.225, 2.250, 2.275]
calibration: reflectance
- file_type: abi_l2_cmip_c06
file_key: CMI
+ file_type: [abi_l2_cmip_c06, abi_l2_mcmip]
CMIP_C07: # Cloud Moisture Image Products Channel 7
name: C07
wavelength: [3.80, 3.90, 4.00]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c07
file_key: CMI
+ file_type: [abi_l2_cmip_c07, abi_l2_mcmip]
CMIP_C08: # Cloud Moisture Image Products Channel 8
name: C08
wavelength: [5.770, 6.185, 6.600]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c08
file_key: CMI
+ file_type: [abi_l2_cmip_c08, abi_l2_mcmip]
CMIP_C09: # Cloud Moisture Image Products Channel 9
name: C09
wavelength: [6.75, 6.95, 7.15]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c09
file_key: CMI
+ file_type: [abi_l2_cmip_c09, abi_l2_mcmip]
CMIP_C10: # Cloud Moisture Image Products Channel 10
name: C10
wavelength: [7.24, 7.34, 7.44]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c10
file_key: CMI
+ file_type: [abi_l2_cmip_c10, abi_l2_mcmip]
CMIP_C11: # Cloud Moisture Image Products Channel 11
name: C11
wavelength: [8.30, 8.50, 8.70]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c11
file_key: CMI
+ file_type: [abi_l2_cmip_c11, abi_l2_mcmip]
CMIP_C12: # Cloud Moisture Image Products Channel 12
name: C12
wavelength: [9.42, 9.61, 9.80]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c12
file_key: CMI
+ file_type: [abi_l2_cmip_c12, abi_l2_mcmip]
CMIP_C13: # Cloud Moisture Image Products Channel 13
name: C13
wavelength: [10.10, 10.35, 10.60]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c13
file_key: CMI
+ file_type: [abi_l2_cmip_c13, abi_l2_mcmip]
CMIP_C14: # Cloud Moisture Image Products Channel 14
name: C14
wavelength: [10.80, 11.20, 11.60]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c14
file_key: CMI
+ file_type: [abi_l2_cmip_c14, abi_l2_mcmip]
CMIP_C15: # Cloud Moisture Image Products Channel 15
name: C15
wavelength: [11.80, 12.30, 12.80]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c15
file_key: CMI
+ file_type: [abi_l2_cmip_c15, abi_l2_mcmip]
CMIP_C16: # Cloud Moisture Image Products Channel 16
name: C16
wavelength: [13.00, 13.30, 13.60]
calibration: brightness_temperature
- file_type: abi_l2_cmip_c16
file_key: CMI
+ file_type: [abi_l2_cmip_c16, abi_l2_mcmip]
# --- Cloud Top Height ---
cloud_top_height:
@@ -150,6 +152,16 @@ datasets:
file_type: abi_l2_acm
file_key: BCM
+ four_level_cloud_mask:
+ name: ACM
+ file_type: abi_l2_acm
+ file_key: ACM
+
+ cloud_probabilities:
+ name: Cloud_Probabilities
+ file_type: abi_l2_acm
+ file_key: Cloud_Probabilities
+
# --- Aerosol Detection Products ---
aerosol_binary_mask:
name: Aerosol
@@ -323,180 +335,218 @@ datasets:
# ----
file_types:
abi_l2_cmip_c01:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C01_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c02:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C02_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c03:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C03_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c04:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C04_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c05:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C05_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c06:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C06_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c07:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C07_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c08:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C08_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c09:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C09_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c10:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C10_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c11:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C11_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c12:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C12_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c13:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C13_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c14:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C14_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c15:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:s}-{scan_mode:2s}C15_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
abi_l2_cmip_c16:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{area_code:1s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CMIP{scene_abbr:1s}-{scan_mode:2s}C16_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CMIP"
+
+ abi_l2_mcmip:
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-MCMIP{scene_abbr:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "MCMIP"
abi_l2_acha:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "ACHA"
abi_l2_acht:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACHT{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "ACHT"
abi_l2_acm:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACM{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "ACM"
abi_l2_actp:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ACTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "ACTP"
abi_l2_adp:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-ADP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "ADP"
abi_l2_aod:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-AOD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "AOD"
abi_l2_cod:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns:
- # F (Full Disk) or C (CONUS)
- - '{system_environment:2s}_{mission_id:3s}-L2-COD{area_code:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
- # M1 or M2 for mesoscale
- - '{system_environment:2s}_{mission_id:3s}-L2-CODM{area_code:1d}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-COD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "COD"
# CSPP Geo keeps Day and Night algorithm outputs separate
abi_l2_codd:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
file_patterns:
- '{system_environment:2s}_{mission_id:3s}-L2-CODD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
+ observation_type: "CODD"
abi_l2_codn:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
file_patterns:
- '{system_environment:2s}_{mission_id:3s}-L2-CODN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
+ observation_type: "CODN"
abi_l2_cps:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns:
- # F (Full Disk) or C (CONUS)
- - '{system_environment:2s}_{mission_id:3s}-L2-CPS{area_code:1s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
- # M1 or M2 for mesoscale
- - '{system_environment:2s}_{mission_id:3s}-L2-CPSM{area_code:1d}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CPS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CPS"
# CSPP Geo keeps Day and Night algorithm outputs separate
abi_l2_cpsd:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
file_patterns:
- '{system_environment:2s}_{mission_id:3s}-L2-CPSD{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
+ observation_type: "CPSD"
abi_l2_cpsn:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
file_patterns:
- '{system_environment:2s}_{mission_id:3s}-L2-CPSN{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc'
+ observation_type: "CPSN"
abi_l2_ctp:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-CTP{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "CTP"
abi_l2_dsi:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSI{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "DSI"
abi_l2_drs:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DRS{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "DRS"
abi_l2_fdc:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FDC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "FDC"
abi_l2_fsc:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-FSC{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "FSC"
abi_l2_lst:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-LST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "LST"
abi_l2_rrqpe:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RRQPE{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "RRQPE"
abi_l2_rsr:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-RSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "RSR"
abi_l2_dsr:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-DSR{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "DSR"
abi_l2_sst:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-SST{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "SST"
abi_l2_tpw:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-TPW{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "TPW"
abi_l2_vaa:
- file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
- file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc', '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc']
+ file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
+ file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc', '{system_environment:2s}_{mission_id:3s}-L2-VAA{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}-128600_0.nc']
+ observation_type: "VAA"
# CSPP - Geo Unofficial product
abi_l2_nav:
file_reader: !!python/name:satpy.readers.abi_l2_nc.NC_ABI_L2
file_patterns: ['{system_environment:2s}_{mission_id:3s}-L2-NAV{scene_abbr:s}-{scan_mode:2s}_{platform_shortname:3s}_s{start_time:%Y%j%H%M%S%f}_e{end_time:%Y%j%H%M%S%f}_c{creation_time:%Y%j%H%M%S%f}.nc']
+ observation_type: "NAV"
diff --git a/satpy/etc/readers/acspo.yaml b/satpy/etc/readers/acspo.yaml
index 4a3049058d..7f7587beb0 100644
--- a/satpy/etc/readers/acspo.yaml
+++ b/satpy/etc/readers/acspo.yaml
@@ -1,6 +1,10 @@
reader:
- description: NOAA Level 2 Product (L2P) ACSPO SST File Reader
name: acspo
+ short_name: ACSPO SST
+ long_name: NOAA Level 2 ACSPO SST data in netCDF4 format
+ description: NOAA Level 2 Product (L2P) ACSPO SST File Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [viirs,modis,avhrr]
default_datasets:
@@ -8,7 +12,7 @@ reader:
file_types:
acspo_sst:
file_reader: !!python/name:satpy.readers.acspo.ACSPOFileHandler
- file_patterns: ['{start_time:%Y%m%d%H%M%S}-{rdac:4s}-L2P_GHRSST-SSTskin-{sensor_id}-ACSPO_V{version}-v{gds_version}-fv{file_version}.nc']
+ file_patterns: ['{start_time:%Y%m%d%H%M%S}-{rdac:4s}-L2P_GHRSST-{dataset_name}-{sensor_id}-ACSPO_V{version}-v{gds_version}-fv{file_version}.nc']
datasets:
longitude:
@@ -37,4 +41,3 @@ datasets:
name: wind_speed
coordinates: [longitude, latitude]
file_type: acspo_sst
-
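
The ACSPO pattern change above swaps the hard-coded `SSTskin` token for a `{dataset_name}` field, so other GHRSST dataset names match as well. A quick check with trollsift (the pattern library Satpy uses), on a hypothetical filename:

    from trollsift import parse

    pattern = ("{start_time:%Y%m%d%H%M%S}-{rdac:4s}-L2P_GHRSST-{dataset_name}-"
               "{sensor_id}-ACSPO_V{version}-v{gds_version}-fv{file_version}.nc")
    # Hypothetical sub-skin SST granule; only 'SSTskin' matched the old pattern.
    info = parse(pattern, "20200520120000-STAR-L2P_GHRSST-SSTsubskin-VIIRS_NPP-ACSPO_V2.61-v02.0-fv01.0.nc")
    print(info["dataset_name"])  # 'SSTsubskin'
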
diff --git a/satpy/etc/readers/agri_l1.yaml b/satpy/etc/readers/agri_fy4a_l1.yaml
old mode 100755
new mode 100644
similarity index 92%
rename from satpy/etc/readers/agri_l1.yaml
rename to satpy/etc/readers/agri_fy4a_l1.yaml
index 27b553e714..5e3dfead35
--- a/satpy/etc/readers/agri_l1.yaml
+++ b/satpy/etc/readers/agri_fy4a_l1.yaml
@@ -1,327 +1,317 @@
-# References:
-# - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager
-# - http://fy4.nsmc.org.cn/data/en/data/realtime.html
-
-reader:
- name: agri_l1
- description: FY-4A AGRI instrument HDF5 reader
- sensors: [agri]
- default_channels:
- reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
-
-file_types:
- agri_l1_0500m:
- file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
- file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
- agri_l1_1000m:
- file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
- file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
- agri_l1_2000m:
- file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
- file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
- agri_l1_4000m:
- file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
- file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
- agri_l1_4000m_geo:
- file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
- file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
-
-datasets:
- C01:
- name: C01
- wavelength: [0.45, 0.47, 0.49]
- resolution:
- 1000: {file_type: agri_l1_1000m}
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel01
- lut_key: CALChannel01
- fill_value: 65535
-
- C02:
- name: C02
- wavelength: [0.55, 0.65, 0.75]
- resolution:
- 500: {file_type: agri_l1_0500m}
- 1000: {file_type: agri_l1_1000m}
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel02
- lut_key: CALChannel02
- fill_value: 65535
-
- C03:
- name: C03
- wavelength: [0.75, 0.83, 0.90]
- resolution:
- 1000: {file_type: agri_l1_1000m}
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel03
- lut_key: CALChannel03
- fill_value: 65535
-
- C04:
- name: C04
- wavelength: [1.36, 1.37, 1.39]
- resolution:
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel04
- lut_key: CALChannel04
- fill_value: 65535
-
- C05:
- name: C05
- wavelength: [1.58, 1.61, 1.64]
- resolution:
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel05
- lut_key: CALChannel05
- fill_value: 65535
-
- C06:
- name: C06
- wavelength: [2.10, 2.22, 2.35]
- resolution:
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel06
- lut_key: CALChannel06
- fill_value: 65535
-
- C07:
- name: C07
- wavelength: [3.5, 3.72, 4.0]
- resolution:
- 2000: {file_type: agri_l1_2000m}
- 4000: {file_type: agri_l1_4000m}
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel07
- lut_key: CALChannel07
- fill_value: 65535
-
- C08:
- name: C08
- wavelength: [3.5, 3.72, 4.0]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel08
- lut_key: CALChannel08
- file_type: agri_l1_4000m
- fill_value: 65535
-
- C09:
- name: C09
- wavelength: [5.8, 6.25, 6.7]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel09
- lut_key: CALChannel09
- file_type: agri_l1_4000m
- fill_value: 65535
-
- C10:
- name: C10
- wavelength: [6.9, 7.10, 7.3]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel10
- lut_key: CALChannel10
- file_type: agri_l1_4000m
- fill_value: 65535
-
- C11:
- name: C11
- wavelength: [8.0, 8.5, 9.0]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel11
- lut_key: CALChannel11
- file_type: agri_l1_4000m
- fill_value: 65535
-
- C12:
- name: C12
- wavelength: [10.3, 10.8, 11.1]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel12
- lut_key: CALChannel12
- file_type: agri_l1_4000m
- fill_value: 65535
-
- C13:
- name: C13
- wavelength: [11.5, 12.0, 12.5]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel13
- lut_key: CALChannel13
- file_type: agri_l1_4000m
- fill_value: 65535
-
- C14:
- name: C14
- wavelength: [13.2, 13.5, 13.8]
- resolution: 4000
- calibration:
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavenumber
- units: "mW/ (m2 cm-1 sr)"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- counts:
- standard_name: counts
- units: "1"
- file_key: NOMChannel14
- lut_key: CALChannel14
- file_type: agri_l1_4000m
- fill_value: 65535
-
- solar_zenith_angle:
- name: solar_zenith_angle
- units: degree
- standard_name: solar_zenith_angle
- resolution: 4000
- file_type: agri_l1_4000m_geo
- file_key: NOMSunZenith
-
- solar_azimuth_angle:
- name: solar_azimuth_angle
- units: degree
- standard_name: solar_azimuth_angle
- resolution: 4000
- file_type: agri_l1_4000m_geo
- file_key: NOMSunAzimuth
-
- solar_glint_angle:
- name: solar_glint_angle
- units: degree
- standard_name: solar_glint_angle
- resolution: 4000
- file_type: agri_l1_4000m_geo
- file_key: NOMSunGlintAngle
-
- satellite_zenith_angle:
- name: satellite_zenith_angle
- units: degree
- standard_name: satellite_zenith_angle
- resolution: 4000
- file_type: agri_l1_4000m_geo
- file_key: NOMSatelliteZenith
-
- satellite_azimuth_angle:
- name: satellite_azimuth_angle
- units: degree
- standard_name: satellite_azimuth_angle
- resolution: 4000
- file_type: agri_l1_4000m_geo
- file_key: NOMSatelliteAzimuth
\ No newline at end of file
+# References:
+# - L1_SDR Data of FY4A Advanced Geostationary Radiation Imager
+# - http://fy4.nsmc.org.cn/data/en/data/realtime.html
+
+reader:
+ name: agri_fy4a_l1
+ short_name: AGRI FY4A L1
+ long_name: FY-4A AGRI L1 data in HDF5 format
+ description: FY-4A AGRI instrument HDF5 reader
+ status: Beta
+ supports_fsspec: false
+ sensors: [agri]
+ default_channels:
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+ agri_l1_0500m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
+ agri_l1_1000m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
+ agri_l1_2000m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
+ agri_l1_4000m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
+ agri_l1_4000m_geo:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
+
+datasets:
+ C01:
+ name: C01
+ wavelength: [0.45, 0.47, 0.49]
+ resolution:
+ 1000: {file_type: agri_l1_1000m}
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel01
+ lut_key: CALChannel01
+
+ C02:
+ name: C02
+ wavelength: [0.55, 0.65, 0.75]
+ resolution:
+ 500: {file_type: agri_l1_0500m}
+ 1000: {file_type: agri_l1_1000m}
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel02
+ lut_key: CALChannel02
+
+ C03:
+ name: C03
+ wavelength: [0.75, 0.83, 0.90]
+ resolution:
+ 1000: {file_type: agri_l1_1000m}
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel03
+ lut_key: CALChannel03
+
+ C04:
+ name: C04
+ wavelength: [1.36, 1.37, 1.39]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel04
+ lut_key: CALChannel04
+
+ C05:
+ name: C05
+ wavelength: [1.58, 1.61, 1.64]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel05
+ lut_key: CALChannel05
+
+ C06:
+ name: C06
+ wavelength: [2.10, 2.22, 2.35]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel06
+ lut_key: CALChannel06
+
+ C07:
+ name: C07
+ wavelength: [3.5, 3.72, 4.0]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel07
+ lut_key: CALChannel07
+
+ C08:
+ name: C08
+ wavelength: [3.5, 3.72, 4.0]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel08
+ lut_key: CALChannel08
+ file_type: agri_l1_4000m
+
+ C09:
+ name: C09
+ wavelength: [5.8, 6.25, 6.7]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel09
+ lut_key: CALChannel09
+ file_type: agri_l1_4000m
+
+ C10:
+ name: C10
+ wavelength: [6.9, 7.10, 7.3]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel10
+ lut_key: CALChannel10
+ file_type: agri_l1_4000m
+
+ C11:
+ name: C11
+ wavelength: [8.0, 8.5, 9.0]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel11
+ lut_key: CALChannel11
+ file_type: agri_l1_4000m
+
+ C12:
+ name: C12
+ wavelength: [10.3, 10.8, 11.1]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel12
+ lut_key: CALChannel12
+ file_type: agri_l1_4000m
+
+ C13:
+ name: C13
+ wavelength: [11.5, 12.0, 12.5]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel13
+ lut_key: CALChannel13
+ file_type: agri_l1_4000m
+
+ C14:
+ name: C14
+ wavelength: [13.2, 13.5, 13.8]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel14
+ lut_key: CALChannel14
+ file_type: agri_l1_4000m
+
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ units: degree
+ standard_name: solar_zenith_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSunZenith
+
+ solar_azimuth_angle:
+ name: solar_azimuth_angle
+ units: degree
+ standard_name: solar_azimuth_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSunAzimuth
+
+ solar_glint_angle:
+ name: solar_glint_angle
+ units: degree
+ standard_name: solar_glint_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSunGlintAngle
+
+ satellite_zenith_angle:
+ name: satellite_zenith_angle
+ units: degree
+ standard_name: satellite_zenith_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSatelliteZenith
+
+ satellite_azimuth_angle:
+ name: satellite_azimuth_angle
+ units: degree
+ standard_name: satellite_azimuth_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSatelliteAzimuth
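
The rename above changes the reader's public name from agri_l1 to agri_fy4a_l1 while the handler module stays satpy.readers.agri_l1. A minimal sketch of loading under the new name, with a hypothetical FY-4A full-disk granule:

    from satpy import Scene

    # Hypothetical 4 km FY-4A granule matching the agri_l1_4000m pattern.
    filenames = ["FY4A-_AGRI--_N_DISK_1047E_L1-_FDI-_MULT_NOM_20190807060000_20190807061459_4000M_V0001.HDF"]
    scn = Scene(filenames=filenames, reader="agri_fy4a_l1")  # formerly reader="agri_l1"
    scn.load(["C07"], calibration="brightness_temperature")
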
diff --git a/satpy/etc/readers/agri_fy4b_l1.yaml b/satpy/etc/readers/agri_fy4b_l1.yaml
new file mode 100644
index 0000000000..b1ff44189d
--- /dev/null
+++ b/satpy/etc/readers/agri_fy4b_l1.yaml
@@ -0,0 +1,331 @@
+# References:
+# - L1_SDR Data of FY4B Advanced Geostationary Radiation Imager
+# - http://fy4.nsmc.org.cn/data/en/data/realtime.html
+
+reader:
+ name: agri_fy4b_l1
+ description: FY-4B AGRI instrument HDF5 reader
+ sensors: [agri]
+ default_channels:
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+ agri_l1_0500m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
+ agri_l1_1000m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
+ agri_l1_2000m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
+ agri_l1_4000m:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
+ agri_l1_4000m_geo:
+ file_reader: !!python/name:satpy.readers.agri_l1.HDF_AGRI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:4s}--_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_4000M_{version:s}.HDF']
+
+datasets:
+ C01:
+ name: C01
+ wavelength: [0.45, 0.47, 0.49]
+ resolution:
+ 1000: {file_type: agri_l1_1000m}
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel01
+ lut_key: CALChannel01
+
+ C02:
+ name: C02
+ wavelength: [0.55, 0.65, 0.75]
+ resolution:
+ 500: {file_type: agri_l1_0500m}
+ 1000: {file_type: agri_l1_1000m}
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel02
+ lut_key: CALChannel02
+
+ C03:
+ name: C03
+ wavelength: [0.75, 0.83, 0.90]
+ resolution:
+ 1000: {file_type: agri_l1_1000m}
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel03
+ lut_key: CALChannel03
+
+ C04:
+ name: C04
+ wavelength: [1.36, 1.37, 1.39]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel04
+ lut_key: CALChannel04
+
+ C05:
+ name: C05
+ wavelength: [1.58, 1.61, 1.64]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel05
+ lut_key: CALChannel05
+
+ C06:
+ name: C06
+ wavelength: [2.10, 2.22, 2.35]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel06
+ lut_key: CALChannel06
+
+ C07:
+ name: C07
+ wavelength: [3.5, 3.72, 4.0]
+ resolution:
+ 2000: {file_type: agri_l1_2000m}
+ 4000: {file_type: agri_l1_4000m}
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel07
+ lut_key: CALChannel07
+
+ C08:
+ name: C08
+ wavelength: [3.5, 3.72, 4.0]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel08
+ lut_key: CALChannel08
+ file_type: agri_l1_4000m
+
+ C09:
+ name: C09
+ wavelength: [5.69, 6.25, 6.81]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel09
+ lut_key: CALChannel09
+ file_type: agri_l1_4000m
+
+ C10:
+ name: C10
+ wavelength: [6.67, 6.95, 7.21]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel10
+ lut_key: CALChannel10
+ file_type: agri_l1_4000m
+
+ C11:
+ name: C11
+ wavelength: [7.19, 7.42, 7.70]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel11
+ lut_key: CALChannel11
+ file_type: agri_l1_4000m
+
+ C12:
+ name: C12
+ wavelength: [8.0, 8.5, 9.0]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel12
+ lut_key: CALChannel12
+ file_type: agri_l1_4000m
+
+ C13:
+ name: C13
+ wavelength: [10.3, 10.8, 11.1]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel13
+ lut_key: CALChannel13
+ file_type: agri_l1_4000m
+
+ C14:
+ name: C14
+ wavelength: [11.5, 12.0, 12.5]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel14
+ lut_key: CALChannel14
+ file_type: agri_l1_4000m
+
+ C15:
+ name: C15
+ wavelength: [13.2, 13.5, 13.8]
+ resolution: 4000
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel15
+ lut_key: CALChannel15
+ file_type: agri_l1_4000m
+
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ units: degree
+ standard_name: solar_zenith_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSunZenith
+
+ solar_azimuth_angle:
+ name: solar_azimuth_angle
+ units: degree
+ standard_name: solar_azimuth_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSunAzimuth
+
+ solar_glint_angle:
+ name: solar_glint_angle
+ units: degree
+ standard_name: solar_glint_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSunGlintAngle
+
+ satellite_zenith_angle:
+ name: satellite_zenith_angle
+ units: degree
+ standard_name: satellite_zenith_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSatelliteZenith
+
+ satellite_azimuth_angle:
+ name: satellite_azimuth_angle
+ units: degree
+ standard_name: satellite_azimuth_angle
+ resolution: 4000
+ file_type: agri_l1_4000m_geo
+ file_key: NOMSatelliteAzimuth
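
Relative to the FY-4A configuration, the new FY-4B table adds a 7.42 um water-vapour channel (C11), shifting the longwave IR bands down one slot and ending at C15 (13.5 um). A sketch with a hypothetical FY-4B granule name (same naming convention, platform_id FY4B):

    from satpy import Scene

    # Hypothetical 4 km FY-4B granule; the file pattern is shared with FY-4A.
    filenames = ["FY4B-_AGRI--_N_DISK_1330E_L1-_FDI-_MULT_NOM_20220101000000_20220101001459_4000M_V0001.HDF"]
    scn = Scene(filenames=filenames, reader="agri_fy4b_l1")
    scn.load(["C15"])  # the C15 name exists only in the FY-4B configuration
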
diff --git a/satpy/etc/readers/ahi_hrit.yaml b/satpy/etc/readers/ahi_hrit.yaml
index 22d45a33b8..979ea3c0eb 100644
--- a/satpy/etc/readers/ahi_hrit.yaml
+++ b/satpy/etc/readers/ahi_hrit.yaml
@@ -5,8 +5,10 @@
reader:
name: ahi_hrit
short_name: AHI HRIT
- long_name: Himawari AHI Level 1 (HRIT)
+ long_name: Himawari (8 + 9) AHI Level 1 (HRIT)
description: Reader for the JMA Himawari AHI Level 1 data in HRIT format
+ status: Nominal
+ supports_fsspec: false
sensors: [ahi]
reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
# file pattern keys to sort files by with 'satpy.utils.group_files'
@@ -17,6 +19,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B01_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b01_fd:
@@ -28,6 +31,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B02_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b02_fd:
@@ -39,6 +43,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b03_fd:
@@ -50,6 +55,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B04_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b04_fd:
@@ -61,6 +67,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B05_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b05_fd:
@@ -72,6 +79,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B06_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b06_fd:
@@ -86,6 +94,7 @@ file_types:
# https://www.data.jma.go.jp/mscweb/en/himawari89/himawari_cast/note/HimawariCast_dataset_20150624_en.pdf
file_patterns:
- 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B07_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b07_fd:
@@ -100,6 +109,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b07_ir4_fd:
@@ -111,6 +121,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b08_fd:
@@ -122,6 +133,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B09_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b09_fd:
@@ -133,6 +145,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B10_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b10_fd:
@@ -144,6 +157,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B11_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b11_fd:
@@ -155,6 +169,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B12_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b12_fd:
@@ -166,6 +181,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b13_fd:
@@ -177,6 +193,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B14_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b14_fd:
@@ -188,6 +205,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b15_fd:
@@ -199,6 +217,7 @@ file_types:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}'
+ - 'IMG_DK{area:02d}B16_{start_time:%Y%m%d%H%M}_{segment:03d}.bz2'
expected_segments: 10
hrit_b16_fd:
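
Each HimawariCast segment type above gains a second pattern with a .bz2 suffix, so compressed segments are matched without renaming. Assuming the HRIT handler decompresses them on the fly, loading looks the same as for plain segments:

    from glob import glob

    from satpy import Scene

    # Hypothetical directory of bz2-compressed B04 segments for one time slot.
    filenames = glob("IMG_DK01B04_202201010000_*.bz2")
    scn = Scene(filenames=filenames, reader="ahi_hrit")
    scn.load(["B04"])
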
diff --git a/satpy/etc/readers/ahi_hsd.yaml b/satpy/etc/readers/ahi_hsd.yaml
index e0b4eb6b77..03d5ee3863 100644
--- a/satpy/etc/readers/ahi_hsd.yaml
+++ b/satpy/etc/readers/ahi_hsd.yaml
@@ -4,8 +4,10 @@
reader:
name: ahi_hsd
short_name: AHI HSD
- long_name: Himawari AHI Level 1b (HSD)
+ long_name: Himawari (8 + 9) AHI Level 1b (HSD)
description: Reader for the JMA Himawari AHI Level 1 data in HSD format
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
sensors: [ahi]
# file pattern keys to sort files by with 'satpy.utils.group_files'
@@ -288,65 +290,65 @@ datasets:
file_types:
hsd_b01:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R10_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B01_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b02:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R10_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B02_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b03:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R05_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B03_{area}_R05_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b04:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R10_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_R10_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b05:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B05_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b06:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B06_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b07:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B07_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b08:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B08_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b09:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B09_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b10:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B10_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b11:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B11_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b12:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B12_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b13:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B13_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b14:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B14_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b15:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B15_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
hsd_b16:
file_reader: !!python/name:satpy.readers.ahi_hsd.AHIHSDFileHandler
- file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R20_S{segment:2d}{total_segments:2d}.DAT',
+ file_patterns: ['HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT',
'HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B16_{area}_R20_S{segment:2d}{total_segments:2d}.DAT.bz2']
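
These hunks generalize the hard-coded resolution token (R05/R10/R20) in the first pattern of each band to a `{res_id:d}` field, while the `.bz2` alternatives keep their literal tokens. A quick sanity check of the new pattern with trollsift, the filename-parsing library these patterns are written for (the sample filename is made up):

```python
# Sketch: confirm the generalized AHI HSD pattern parses a segment filename.
# The filename below is invented for illustration only.
from trollsift import parse

pattern = ('HS_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B04_{area}_'
           'R{res_id:d}_S{segment:2d}{total_segments:2d}.DAT')

fields = parse(pattern, 'HS_H08_20210620_0300_B04_FLDK_R10_S0110.DAT')
print(fields['res_id'], fields['segment'], fields['total_segments'])
# -> 10 1 10
```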
diff --git a/satpy/etc/readers/ahi_l1b_gridded_bin.yaml b/satpy/etc/readers/ahi_l1b_gridded_bin.yaml
index 550a5021b1..647d31cad0 100644
--- a/satpy/etc/readers/ahi_l1b_gridded_bin.yaml
+++ b/satpy/etc/readers/ahi_l1b_gridded_bin.yaml
@@ -4,9 +4,11 @@
reader:
name: ahi_l1b_gridded_bin
short_name: AHI Gridded
- long_name: Himawari AHI Level 1b (gridded)
+ long_name: Himawari (8 + 9) AHI Level 1b (gridded)
description: Reader for the JMA Himawari AHI Level 1 data in gridded format, downloadable from
http://www.cr.chiba-u.jp/databases/GEO/H8_9/FD/index_jp.html
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [ahi]
# file pattern keys to sort files by with 'satpy.utils.group_files'
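
The `status` and `supports_fsspec` keys added here (and to most reader files below) end up in the reader metadata that satpy exposes. A minimal sketch, assuming a satpy version in which `available_readers()` accepts the `as_dict` keyword:

```python
# Sketch: list reader maturity metadata, assuming available_readers()
# supports as_dict=True in the installed satpy version.
from satpy import available_readers

for info in available_readers(as_dict=True):
    # 'status' and 'supports_fsspec' come straight from the reader: section
    # of each YAML file; readers lacking the keys simply omit them.
    print(info['name'], info.get('status'), info.get('supports_fsspec'))
```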
diff --git a/satpy/etc/readers/ami_l1b.yaml b/satpy/etc/readers/ami_l1b.yaml
index 952fea35f7..8366f12117 100644
--- a/satpy/etc/readers/ami_l1b.yaml
+++ b/satpy/etc/readers/ami_l1b.yaml
@@ -7,6 +7,8 @@ reader:
instrument are described on KMA's website
`here `_.
sensors: [ami]
+ status: Beta
+ supports_fsspec: false
default_channels:
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
# file pattern keys to sort files by with 'satpy.utils.group_files'
@@ -338,4 +340,3 @@ datasets:
units: K
file_type: ir133
file_key: image_pixel_values
-
diff --git a/satpy/etc/readers/amsr2_l1b.yaml b/satpy/etc/readers/amsr2_l1b.yaml
index 91d5d1a20a..9d38f1257c 100644
--- a/satpy/etc/readers/amsr2_l1b.yaml
+++ b/satpy/etc/readers/amsr2_l1b.yaml
@@ -1,6 +1,10 @@
reader:
name: amsr2_l1b
+ short_name: AMSR2 l1b
+ long_name: GCOM-W1 AMSR2 data in HDF5 format
description: GCOM-W1 AMSR2 instrument HDF5 reader
+ status: Nominal
+ supports_fsspec: false
# could this be a python hook ?
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [amsr2]
diff --git a/satpy/etc/readers/amsr2_l2.yaml b/satpy/etc/readers/amsr2_l2.yaml
index 6342d627cf..c1295773d6 100644
--- a/satpy/etc/readers/amsr2_l2.yaml
+++ b/satpy/etc/readers/amsr2_l2.yaml
@@ -6,6 +6,8 @@ reader:
HDF5 reader for GCOM-W1 AMSR2 Level 2 files from JAXA. See
https://suzaku.eorc.jaxa.jp/GCOM_W/data/data_w_product-2.html for more
information.
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [amsr2]
diff --git a/satpy/etc/readers/amsr2_l2_gaasp.yaml b/satpy/etc/readers/amsr2_l2_gaasp.yaml
index af8fd6ac0c..2b073ed1e5 100644
--- a/satpy/etc/readers/amsr2_l2_gaasp.yaml
+++ b/satpy/etc/readers/amsr2_l2_gaasp.yaml
@@ -6,6 +6,8 @@ reader:
NetCDF4 reader for GCOM-W1 AMSR2 Level 2 files processed using the GAASP
software distributed by NOAA. See
https://www.star.nesdis.noaa.gov/jpss/gcom.php for more information.
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [amsr2]
diff --git a/satpy/etc/readers/amsub_l1c_aapp.yaml b/satpy/etc/readers/amsub_l1c_aapp.yaml
new file mode 100644
index 0000000000..03bf8b127a
--- /dev/null
+++ b/satpy/etc/readers/amsub_l1c_aapp.yaml
@@ -0,0 +1,170 @@
+reader:
+ name: amsub_l1c_aapp
+ short_name: AMSU-B l1c
+ long_name: AAPP L1C AMSU-B format
+ description: AAPP l1c Reader for AMSU-B data
+ status: Beta
+ supports_fsspec: false
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ sensors: [amsub,]
+ default_channels: [16, 17, 18, 19, 20]
+
+ data_identification_keys:
+ name:
+ required: true
+ frequency_double_sideband:
+ type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyDoubleSideBand
+ frequency_range:
+ type: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.FrequencyRange
+ resolution:
+ polarization:
+ enum:
+ - H
+ - V
+ calibration:
+ enum:
+ - brightness_temperature
+ transitive: true
+ modifiers:
+ required: true
+ default: []
+ type: !!python/name:satpy.dataset.ModifierTuple
+
+datasets:
+ '16':
+ name: '16'
+ frequency_range:
+ central: 89.
+ bandwidth: 1.0
+ unit: GHz
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ '17':
+ name: '17'
+ frequency_range:
+ central: 150.
+ bandwidth: 1.0
+ unit: GHz
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ '18':
+ name: '18'
+ frequency_double_sideband:
+ unit: GHz
+ central: 183.31
+ side: 1.0
+ bandwidth: 0.5
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ '19':
+ name: '19'
+ frequency_double_sideband:
+ unit: GHz
+ central: 183.31
+ side: 3.0
+ bandwidth: 1.0
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ '20':
+ name: '20'
+ frequency_double_sideband:
+ unit: GHz
+ central: 183.31
+ side: 7.0
+ bandwidth: 2.0
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ standard_name: solar_zenith_angle
+ units: degrees
+
+ solar_azimuth_angle:
+ name: solar_azimuth_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ standard_name: solar_azimuth_angle
+ units: degrees
+
+ sensor_zenith_angle:
+ name: sensor_zenith_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ standard_name: sensor_zenith_angle
+ units: degrees
+
+ sensor_azimuth_angle:
+ name: sensor_azimuth_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: amsub_aapp_l1c
+ standard_name: sensor_azimuth_angle
+ units: degrees
+
+ latitude:
+ name: latitude
+ resolution: 16000
+ file_type: amsub_aapp_l1c
+ standard_name: latitude
+ units: degrees_north
+
+ longitude:
+ name: longitude
+ resolution: 16000
+ file_type: amsub_aapp_l1c
+ standard_name: longitude
+ units: degrees_east
+
+file_types:
+ amsub_aapp_l1c:
+ file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile
+ file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']
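
The new `amsub_l1c_aapp` reader follows the usual Scene workflow; channels 18–20 are the 183.31 GHz double-sideband channels defined above. A minimal sketch, with a hypothetical filename shaped like the pattern above:

```python
# Minimal sketch: load AMSU-B channel 18 (183.31 +/- 1.0 GHz) with the new
# reader. The filename is hypothetical but matches the declared pattern.
from satpy import Scene

scn = Scene(filenames=['mhsl1c_noaa19_20210620_0300_12345.l1c'],
            reader='amsub_l1c_aapp')
scn.load(['18'])                         # brightness_temperature calibration
print(scn['18'].attrs['standard_name'])  # toa_brightness_temperature
```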
diff --git a/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml b/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml
index 22aada9fe2..35b074eb65 100644
--- a/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml
+++ b/satpy/etc/readers/ascat_l2_soilmoisture_bufr.yaml
@@ -4,6 +4,8 @@ reader:
long_name: METOP ASCAT Level 2 SOILMOISTURE BUFR
description: >
Reader for ASCAT L2 SOIL MOISTURE FILES
+ status: Defunct
+ supports_fsspec: false
sensors: [scatterometer]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
default_datasets:
@@ -11,7 +13,7 @@ reader:
file_types:
ascat_l2_soilmoisture_bufr:
file_reader: !!python/name:satpy.readers.ascat_l2_soilmoisture_bufr.AscatSoilMoistureBufr
- file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_EUMC_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_{level}.bin"]
+ file_patterns: ["W_XX-EUMETSAT-{reception_location},SOUNDING+SATELLITE,{platform}+{instrument}_C_{header}_{start_time:%Y%m%d%H%M%S}_{perigee}_eps_o_{species}_ssm_l2.bin"]
datasets:
@@ -49,4 +51,3 @@ datasets:
coordinates: [longitude, latitude]
key: soilMoistureQuality
fill_value: -1.e+100
-
diff --git a/satpy/etc/readers/atms_l1b_nc.yaml b/satpy/etc/readers/atms_l1b_nc.yaml
new file mode 100644
index 0000000000..12c3cc6e39
--- /dev/null
+++ b/satpy/etc/readers/atms_l1b_nc.yaml
@@ -0,0 +1,396 @@
+reader:
+ name: atms_l1b_nc
+ short_name: ATMS L1B NetCDF4
+ long_name: S-NPP and JPSS-1 ATMS L1B (NetCDF4)
+ description: >
+ Reader for the S-NPP and JPSS-1 Advanced Technology Microwave Sounder Level 1B files in NetCDF4.
+ status: Beta
+ sensors: [atms]
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ supports_fsspec: false
+
+ data_identification_keys:
+ name:
+ required: true
+ frequency_quadruple_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyQuadrupleSideBand
+ frequency_double_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
+ frequency_range:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange
+ polarization:
+ enum:
+ - QH
+ - QV
+ calibration:
+ enum:
+ - brightness_temperature
+ transitive: true
+
+file_types:
+ atms_l1b_nc:
+ file_reader: !!python/name:satpy.readers.atms_l1b_nc.AtmsL1bNCFileHandler
+ file_patterns: ['{project_name:s}.{platform_name:s}.ATMS.{start_time:%Y%m%dT%H%M}.m{duration:2d}.g{granule_number:3d}.L1B.std.{version:s}.{producer:1s}.{creation_timestamp:d}.nc']
+ antenna_temperature: antenna_temp
+
+datasets:
+
+# --- Coordinates ---
+ lat:
+ name: lat
+ file_type: atms_l1b_nc
+ standard_name: latitude
+ units: degrees_north
+
+ lon:
+ name: lon
+ file_type: atms_l1b_nc
+ standard_name: longitude
+ units: degrees_east
+
+# --- Measurement data ---
+ '1':
+ name: '1'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 23.8
+ bandwidth: 0.27
+ unit: GHz
+ polarization: QV
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '2':
+ name: '2'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 31.4
+ bandwidth: 0.18
+ unit: GHz
+ polarization: QV
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '3':
+ name: '3'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 50.3
+ bandwidth: 0.18
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '4':
+ name: '4'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 51.76
+ bandwidth: 0.4
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '5':
+ name: '5'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 52.8
+ bandwidth: 0.4
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '6':
+ name: '6'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 53.596
+ side: 0.115
+ bandwidth: 0.17
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '7':
+ name: '7'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 54.4
+ bandwidth: 0.4
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '8':
+ name: '8'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 54.94
+ bandwidth: 0.4
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '9':
+ name: '9'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 55.5
+ bandwidth: 0.33
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '10':
+ name: '10'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 57.290344
+ bandwidth: 0.33
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '11':
+ name: '11'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 57.290344
+ side: 0.217
+ bandwidth: 0.078
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '12':
+ name: '12'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.048
+ bandwidth: 0.036
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '13':
+ name: '13'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.022
+ bandwidth: 0.016
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '14':
+ name: '14'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.010
+ bandwidth: 0.008
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '15':
+ name: '15'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.0045
+ bandwidth: 0.003
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '16':
+ name: '16'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 88.2
+ bandwidth: 2.0
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '17':
+ name: '17'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_range:
+ central: 165.5
+ bandwidth: 3.0
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '18':
+ name: '18'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 183.31
+ side: 7.0
+ bandwidth: 2.0
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '19':
+ name: '19'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 183.31
+ side: 4.5
+ bandwidth: 2.0
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '20':
+ name: '20'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 183.31
+ side: 3.0
+ bandwidth: 1.0
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '21':
+ name: '21'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 183.31
+ side: 1.8
+ bandwidth: 1.0
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ '22':
+ name: '22'
+ file_type: atms_l1b_nc
+ coordinates: [lon, lat]
+ frequency_double_sideband:
+ central: 183.31
+ side: 1.0
+ bandwidth: 0.5
+ unit: GHz
+ polarization: QH
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+# --- Navigation data ---
+ obs_time_tai93:
+ name: obs_time_tai93
+ standard_name: time
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
+
+ sol_azi:
+ name: sol_azi
+ standard_name: solar_azimuth_angle
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
+
+ sol_zen:
+ name: sol_zen
+ standard_name: solar_zenith_angle
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
+
+ sat_azi:
+ name: sat_azi
+ standard_name: satellite_azimuth_angle
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
+
+ sat_zen:
+ name: sat_zen
+ standard_name: satellite_zenith_angle
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
+
+# --- Land surface data ---
+ land_frac:
+ name: land_frac
+ standard_name: land_area_fraction
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
+
+ surf_alt:
+ name: surf_alt
+ standard_name: surface_altitude
+ coordinates: [lon, lat]
+ file_type: atms_l1b_nc
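
The new `atms_l1b_nc` reader exposes both measurement channels and the navigation datasets declared above, all geolocated via the `lon`/`lat` coordinates. A minimal sketch; the filename is hypothetical but follows the `file_patterns` entry:

```python
# Sketch: load an ATMS L1B channel plus a navigation dataset.
# The filename is invented to match the pattern declared above.
from satpy import Scene

fname = 'SNDR.SNPP.ATMS.20210620T0300.m06.g031.L1B.std.v3.u.1234567890.nc'
scn = Scene(filenames=[fname], reader='atms_l1b_nc')
scn.load(['1', 'sat_zen'])        # 23.8 GHz QV channel + sensor zenith angle
print(scn['1'].attrs['units'])    # K
```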
diff --git a/satpy/etc/readers/avhrr_l1b_aapp.yaml b/satpy/etc/readers/avhrr_l1b_aapp.yaml
index 3c00190a0c..e801c477b9 100644
--- a/satpy/etc/readers/avhrr_l1b_aapp.yaml
+++ b/satpy/etc/readers/avhrr_l1b_aapp.yaml
@@ -1,6 +1,10 @@
reader:
name: avhrr_l1b_aapp
+ short_name: AVHRR l1b
+ long_name: NOAA 15 to 19, Metop A to C AVHRR data in AAPP format
description: AAPP l1b Reader for AVHRR
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [avhrr-3,]
default_channels: [1, 2, 3a, 3b, 4, 5]
diff --git a/satpy/etc/readers/avhrr_l1b_eps.yaml b/satpy/etc/readers/avhrr_l1b_eps.yaml
index dba7c350c7..7bfa0e7160 100644
--- a/satpy/etc/readers/avhrr_l1b_eps.yaml
+++ b/satpy/etc/readers/avhrr_l1b_eps.yaml
@@ -1,6 +1,10 @@
reader:
name: avhrr_l1b_eps
+ short_name: AVHRR l1b eps
+ long_name: Metop A to C AVHRR in native level 1 format
description: EPS Reader for AVHRR
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [avhrr-3]
default_channels: [1, 2, 3a, 3b, 4, 5]
diff --git a/satpy/etc/readers/avhrr_l1b_gaclac.yaml b/satpy/etc/readers/avhrr_l1b_gaclac.yaml
index 4a45447084..484bed6797 100644
--- a/satpy/etc/readers/avhrr_l1b_gaclac.yaml
+++ b/satpy/etc/readers/avhrr_l1b_gaclac.yaml
@@ -1,6 +1,10 @@
reader:
name: avhrr_l1b_gaclac
+ short_name: AVHRR l1b gaclac
+ long_name: TIROS-N, NOAA 7 to 19 AVHRR data in GAC and LAC format
description: AAPP l1b Reader for AVHRR
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [avhrr-3, avhrr-2, avhrr-1]
@@ -178,4 +182,6 @@ file_types:
gac_lac_l1b:
file_reader: !!python/name:satpy.readers.avhrr_l1b_gaclac.GACLACFile
#NSS.GHRR.NJ.D95056.S1116.E1303.B0080506.GC
- file_patterns: ['{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}']
+ file_patterns:
+ - '{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}'
+ - '{subscription_prefix:10d}.{creation_site:3s}.{transfer_mode:4s}.{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}'
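
The added pattern accepts the same GAC/LAC names with a numeric subscription prefix. One hedged way to collect such files is trollsift's `globify()`, which turns a pattern into a shell glob (the directory below is illustrative):

```python
# Sketch: convert the new prefixed GAC/LAC pattern into a glob for discovery.
# globify() is trollsift's pattern-to-glob helper; the data directory is made up.
from glob import glob
from trollsift import globify

prefixed = ('{subscription_prefix:10d}.{creation_site:3s}.{transfer_mode:4s}.'
            '{platform_id:2s}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.'
            'B{orbit_number:05d}{end_orbit_last_digits:02d}.{station:2s}')

files = sorted(glob('/data/gac/' + globify(prefixed)))
```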
diff --git a/satpy/etc/readers/avhrr_l1b_hrpt.yaml b/satpy/etc/readers/avhrr_l1b_hrpt.yaml
index ff0040663b..27882a27ca 100644
--- a/satpy/etc/readers/avhrr_l1b_hrpt.yaml
+++ b/satpy/etc/readers/avhrr_l1b_hrpt.yaml
@@ -1,6 +1,10 @@
reader:
name: avhrr_l1b_hrpt
+ short_name: AVHRR l1b hrpt
+ long_name: NOAA 15 to 19 AVHRR data in raw HRPT format
description: HRPT Reader for AVHRR
+ status: Alpha
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [avhrr-3, avhrr-2]
default_channels: [1, 2, 3a, 3b, 4, 5]
diff --git a/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml b/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml
index 5403e4a64f..92ac8acee2 100644
--- a/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml
+++ b/satpy/etc/readers/avhrr_l1c_eum_gac_fdr_nc.yaml
@@ -3,6 +3,8 @@ reader:
short_name: EUMETSAT_GAC_FDR
long_name: EUMETSAT GAC FDR NetCDF4
description: NetCDF4 reader for EUMETSAT GAC FDR AVHRR L1c
+ status: Defunct
+ supports_fsspec: false
sensors: [avhrr-3, avhrr-2, avhrr-1]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -202,4 +204,4 @@ datasets:
equator_crossing_time:
name: equator_crossing_time
file_type: eumetsat_gac_fdr
- nc_key: 'equator_crossing_time'
\ No newline at end of file
+ nc_key: 'equator_crossing_time'
diff --git a/satpy/etc/readers/caliop_l2_cloud.yaml b/satpy/etc/readers/caliop_l2_cloud.yaml
index fcc919c1c9..b9fc8e978a 100644
--- a/satpy/etc/readers/caliop_l2_cloud.yaml
+++ b/satpy/etc/readers/caliop_l2_cloud.yaml
@@ -1,8 +1,12 @@
reader:
- default_datasets: []
- description: CALIOP Level 2 Cloud Layer Version 3 HDF4 reader
name: caliop_l2_cloud
+ short_name: CALIOP l2
+ long_name: CALIPSO CALIOP Level 2 Cloud Layer data (v3) in EOS-HDF4 format
+ description: CALIOP Level 2 Cloud Layer Version 3 HDF4 reader
+ status: Alpha
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ default_datasets: []
sensors: [caliop]
datasets:
diff --git a/satpy/etc/readers/clavrx.yaml b/satpy/etc/readers/clavrx.yaml
index c755225a66..179c376484 100644
--- a/satpy/etc/readers/clavrx.yaml
+++ b/satpy/etc/readers/clavrx.yaml
@@ -1,19 +1,30 @@
reader:
- description: CLAVR-X Reader
name: clavrx
+ short_name: CLAVR-X
+ long_name: Clouds from AVHRR Extended (CLAVR-x)
+ description: CLAVR-X Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
- sensors: [viirs, modis, avhrr, ahi]
+ sensors: [viirs, modis, avhrr, ahi, abi]
file_types:
- level2:
- file_reader: !!python/name:satpy.readers.clavrx.CLAVRXFileHandler
-# clavrx_npp_d20170520_t2055235_e2056477_b28822.level2.hdf'
-# clavrx_H08_20180719_1300.level2.hdf
+ clavrx_hdf4:
+ # clavrx_npp_d20170520_t2055235_e2056477_b28822.level2.hdf'
+ # clavrx_H08_20180719_1300.level2.hdf
+ file_reader: !!python/name:satpy.readers.clavrx.CLAVRXHDF4FileHandler
+ file_patterns:
+ - 'clavrx_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}.level2.hdf'
+ - 'clavrx_{platform_shortname}.{start_time:%y%j.%H%M}.{resolution:s}.level2.hdf'
+ - 'clavrx_hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit}.level2.hdf'
+ - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}.level2.hdf'
+ clavrx_nc:
+ # clavrx_OR_ABI-L1b-RadF-M6C01_G16_s20211101600189.level2.nc
+ # clavrx_H08_20210322_0300_B01_FLDK_R.level2.nc
+ file_reader: !!python/name:satpy.readers.clavrx.CLAVRXNetCDFFileHandler
file_patterns:
- - 'clavrx_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}.level2.hdf'
- - 'clavrx_{platform_shortname}.{start_time:%y%j.%H%M}.{resolution:s}.level2.hdf'
- - 'clavrx_hrpt_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit}.level2.hdf'
- - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}.level2.hdf'
+ - 'clavrx_OR_{sensor}-L1b-Rad{sector}-{mode}C{channel_number}_{platform_shortname}_s{start_time:%Y%j%H%M%S%f}.level2.nc'
+ - 'clavrx_{platform_shortname}_{start_time:%Y%m%d_%H%M}_B{channel_number}_{sector}_R.level2.nc'
#datasets:
# longitude:
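
With two `file_types` declared, the `clavrx` reader dispatches to `CLAVRXHDF4FileHandler` or `CLAVRXNetCDFFileHandler` purely from which pattern a filename matches. A sketch using `find_files_and_readers` to see which files the reader claims (the base directory is hypothetical):

```python
# Sketch: let satpy match CLAVR-x output files against both file_types above.
# base_dir is hypothetical; .hdf names select the HDF4 handler, .nc the NetCDF one.
from satpy.readers import find_files_and_readers

found = find_files_and_readers(base_dir='/data/clavrx', reader='clavrx')
print(found)   # {'clavrx': [... matching .level2.hdf and .level2.nc paths ...]}
```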
diff --git a/satpy/etc/readers/cmsaf-claas2_l2_nc.yaml b/satpy/etc/readers/cmsaf-claas2_l2_nc.yaml
index 320e5e8a09..07868db4f4 100644
--- a/satpy/etc/readers/cmsaf-claas2_l2_nc.yaml
+++ b/satpy/etc/readers/cmsaf-claas2_l2_nc.yaml
@@ -1,12 +1,15 @@
reader:
+ name: cmsaf-claas2_l2_nc
+ short_name: CMSAF CLAAS-2 data
+ long_name: CMSAF CLAAS-2 data for SEVIRI-derived cloud products
description: >
Reader for Spinning Enhanced Visible and Infrared Imager (SEVIRI)
L2 data as produced by the Climate Monitoring Satellite Application
Facility (CMSAF) in its CLoud property dAtAset using SEVIRI (CLAAS)
dataset, edition 2, doi:10.5676/EUM_SAF_CM/CLAAS/V002. Information on
CMSAF and its products can be found at https://www.cmsaf.eu/.
- name: cmsaf-claas2_l2_nc
- long_name: CMSAF CLAAS-2 data for SEVIRI-derived cloud products
+ status: Beta
+ supports_fsspec: false
sensors: [seviri]
doi: doi:10.5676/EUM_SAF_CM/CLAAS/V002.
default_channels: []
diff --git a/satpy/etc/readers/electrol_hrit.yaml b/satpy/etc/readers/electrol_hrit.yaml
index 316194478f..27e0040fe7 100644
--- a/satpy/etc/readers/electrol_hrit.yaml
+++ b/satpy/etc/readers/electrol_hrit.yaml
@@ -1,6 +1,10 @@
reader:
- description: MSG HRIT Reader
name: electrol_hrit
+ short_name: Electro-L N2
+ long_name: Electro-L N2 MSU-GS data in HRIT format
+ description: Reader for Electro-L N2 MSU-GS HRIT data
+ status: Nominal
+ supports_fsspec: false
sensors: [msu-gs]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/fci_l1c_fdhsi.yaml b/satpy/etc/readers/fci_l1c_fdhsi.yaml
deleted file mode 100644
index c3c243c4ff..0000000000
--- a/satpy/etc/readers/fci_l1c_fdhsi.yaml
+++ /dev/null
@@ -1,390 +0,0 @@
-reader:
- name: fci_l1c_fdhsi
- short_name: FCI L1C FDHSI
- long_name: MTG FCI Level 1C FDHSI
- description: >
- Reader for FCI FDSHI data in NetCDF4 format.
- Used to read Meteosat Third Generation (MTG) Flexible
- Combined Imager (FCI) Full Disk High Spectral Imagery (FDHSI) data.
- reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
- sensors: [fci]
-
-datasets:
-
- vis_04:
- name: vis_04
- sensor: fci
- wavelength: [0.384, 0.444, 0.504]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- vis_05:
- name: vis_05
- sensor: fci
- wavelength: [0.470, 0.510, 0.550]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- vis_06:
- name: vis_06
- sensor: fci
- wavelength: [0.590, 0.640, 0.690]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- vis_08:
- name: vis_08
- sensor: fci
- wavelength: [0.815, 0.865, 0.915]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- vis_09:
- name: vis_09
- sensor: fci
- wavelength: [0.894, 0.914, 0.934]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- nir_13:
- name: nir_13
- sensor: fci
- wavelength: [1.350, 1.380, 1.410]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- nir_16:
- name: nir_16
- sensor: fci
- wavelength: [1.560, 1.610, 1.660]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- nir_22:
- name: nir_22
- sensor: fci
- wavelength: [2.200, 2.250, 2.300]
- resolution: 1000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- reflectance:
- standard_name: toa_bidirectional_reflectance
- units: "%"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- ir_38:
- name: ir_38
- sensor: fci
- wavelength: [3.400, 3.800, 4.200]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- wv_63:
- name: wv_63
- sensor: fci
- wavelength: [5.300, 6.300, 7.300]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- wv_73:
- name: wv_73
- sensor: fci
- wavelength: [6.850, 7.350, 7.850]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- ir_87:
- name: ir_87
- sensor: fci
- wavelength: [8.300, 8.700, 9.100]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- ir_97:
- name: ir_97
- sensor: fci
- wavelength: [9.360, 9.660, 9.960]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- ir_105:
- name: ir_105
- sensor: fci
- wavelength: [9.800, 10.500, 11.200]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- ir_123:
- name: ir_123
- sensor: fci
- wavelength: [11.800, 12.300, 12.800]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- ir_133:
- name: ir_133
- sensor: fci
- wavelength: [12.700, 13.300, 13.900]
- resolution: 2000
- calibration:
- counts:
- standard_name: counts
- units: "count"
- brightness_temperature:
- standard_name: toa_brightness_temperature
- units: "K"
- radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
- file_type: fci_l1c_fdhsi
-
- vis_04_pixel_quality:
- name: vis_04_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- vis_05_pixel_quality:
- name: vis_05_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- vis_06_pixel_quality:
- name: vis_06_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- vis_08_pixel_quality:
- name: vis_08_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- vis_09_pixel_quality:
- name: vis_09_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- nir_13_pixel_quality:
- name: nir_13_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- nir_16_pixel_quality:
- name: nir_16_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- nir_22_pixel_quality:
- name: nir_22_pixel_quality
- sensor: fci
- resolution: 1000
- file_type: fci_l1c_fdhsi
-
- ir_38_pixel_quality:
- name: ir_38_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- wv_63_pixel_quality:
- name: wv_63_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- wv_73_pixel_quality:
- name: wv_73_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- ir_87_pixel_quality:
- name: ir_87_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- ir_97_pixel_quality:
- name: ir_97_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- ir_105_pixel_quality:
- name: ir_105_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- ir_123_pixel_quality:
- name: ir_123_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
- ir_133_pixel_quality:
- name: ir_133_pixel_quality
- sensor: fci
- resolution: 2000
- file_type: fci_l1c_fdhsi
-
-# Source: MTG FCI L1 Product User Guide [FCIL1PUG]
-# ftp://ftp.eumetsat.int/pub/OPS/out/test-data/Test-data-for-External-Users/MTG/MTG_FCI_L1C_Enhanced_and_Non-Nominal_Test_Data/PDF_MTG_FCI_L1_PUG.pdf
-# and Example Products for Pytroll Workshop Package Description,
-# EUM/MTG/DOC/19/1079228
-file_types:
- fci_l1c_fdhsi:
- file_reader: !!python/name:satpy.readers.fci_l1c_fdhsi.FCIFDHSIFileHandler
- file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_level}-{type}-{subtype}-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc']
- expected_segments: 40
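
This deletion pairs with the new `fci_l1c_nc.yaml` in the next diff: the reader is renamed, switches to `GEOVariableSegmentYAMLReader`, and reports radiance per unit wavenumber. A minimal migration sketch, with placeholder filenames standing in for real FDHSI body chunks:

```python
# Migration sketch: the old reader name 'fci_l1c_fdhsi' is removed; use
# 'fci_l1c_nc' instead. The glob below is a placeholder for real chunk files.
from glob import glob
from satpy import Scene

scn = Scene(filenames=glob('/data/fci/*-CHK-BODY-*.nc'), reader='fci_l1c_nc')
scn.load(['ir_105'], calibration='radiance')
print(scn['ir_105'].attrs['units'])  # mW m-2 sr-1 (cm-1)-1, per the YAML below
```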
diff --git a/satpy/etc/readers/fci_l1c_nc.yaml b/satpy/etc/readers/fci_l1c_nc.yaml
new file mode 100644
index 0000000000..75e396b56d
--- /dev/null
+++ b/satpy/etc/readers/fci_l1c_nc.yaml
@@ -0,0 +1,1575 @@
+reader:
+ name: fci_l1c_nc
+ short_name: FCI L1c NC
+ long_name: MTG FCI Level-1c NetCDF
+ description: >
+ Reader for FCI L1c data in NetCDF4 format.
+ Used to read Meteosat Third Generation (MTG) Flexible
+ Combined Imager (FCI) L1c data.
+ status: Beta for FDHSI, HRFI not supported yet
+ supports_fsspec: false
+ reader: !!python/name:satpy.readers.yaml_reader.GEOVariableSegmentYAMLReader
+ sensors: [ fci ]
+
+# Source: MTG FCI L1 Product User Guide [FCIL1PUG]
+# https://www.eumetsat.int/media/45923
+file_types:
+ fci_l1c_fdhsi:
+ file_reader: !!python/name:satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler
+ file_patterns: [ '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-1C-RRAD-FDHSI-{coverage}-{subsetting}-{component1}-BODY-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc' ]
+ expected_segments: 40
+
+
+datasets:
+ vis_04:
+ name: vis_04
+ sensor: fci
+ wavelength: [ 0.384, 0.444, 0.504 ]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ vis_05:
+ name: vis_05
+ sensor: fci
+ wavelength: [0.470, 0.510, 0.550]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ vis_06:
+ name: vis_06
+ sensor: fci
+ wavelength: [0.590, 0.640, 0.690]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ vis_08:
+ name: vis_08
+ sensor: fci
+ wavelength: [0.815, 0.865, 0.915]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ vis_09:
+ name: vis_09
+ sensor: fci
+ wavelength: [0.894, 0.914, 0.934]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ nir_13:
+ name: nir_13
+ sensor: fci
+ wavelength: [1.350, 1.380, 1.410]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ nir_16:
+ name: nir_16
+ sensor: fci
+ wavelength: [1.560, 1.610, 1.660]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ nir_22:
+ name: nir_22
+ sensor: fci
+ wavelength: [2.200, 2.250, 2.300]
+ resolution: 1000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ file_type: fci_l1c_fdhsi
+
+ ir_38:
+ name: ir_38
+ sensor: fci
+ wavelength: [3.400, 3.800, 4.200]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ wv_63:
+ name: wv_63
+ sensor: fci
+ wavelength: [5.300, 6.300, 7.300]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ wv_73:
+ name: wv_73
+ sensor: fci
+ wavelength: [6.850, 7.350, 7.850]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ ir_87:
+ name: ir_87
+ sensor: fci
+ wavelength: [8.300, 8.700, 9.100]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ ir_97:
+ name: ir_97
+ sensor: fci
+ wavelength: [9.360, 9.660, 9.960]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ ir_105:
+ name: ir_105
+ sensor: fci
+ wavelength: [9.800, 10.500, 11.200]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ ir_123:
+ name: ir_123
+ sensor: fci
+ wavelength: [11.800, 12.300, 12.800]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ ir_133:
+ name: ir_133
+ sensor: fci
+ wavelength: [12.700, 13.300, 13.900]
+ resolution: 2000
+ calibration:
+ counts:
+ standard_name: counts
+ units: "count"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ file_type: fci_l1c_fdhsi
+
+ vis_04_pixel_quality:
+ name: vis_04_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_pixel_quality:
+ name: vis_05_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_pixel_quality:
+ name: vis_06_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_pixel_quality:
+ name: vis_08_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_pixel_quality:
+ name: vis_09_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_pixel_quality:
+ name: nir_13_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_pixel_quality:
+ name: nir_16_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_pixel_quality:
+ name: nir_22_pixel_quality
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_pixel_quality:
+ name: ir_38_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_pixel_quality:
+ name: wv_63_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_pixel_quality:
+ name: wv_73_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_pixel_quality:
+ name: ir_87_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_pixel_quality:
+ name: ir_97_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_pixel_quality:
+ name: ir_105_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_pixel_quality:
+ name: ir_123_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_pixel_quality:
+ name: ir_133_pixel_quality
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_index_map:
+ name: vis_04_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_index_map:
+ name: vis_05_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_index_map:
+ name: vis_06_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_index_map:
+ name: vis_08_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_index_map:
+ name: vis_09_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_index_map:
+ name: nir_13_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_index_map:
+ name: nir_16_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_index_map:
+ name: nir_22_index_map
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_index_map:
+ name: ir_38_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_index_map:
+ name: wv_63_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_index_map:
+ name: wv_73_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_index_map:
+ name: ir_87_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_index_map:
+ name: ir_97_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_index_map:
+ name: ir_105_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_index_map:
+ name: ir_123_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_index_map:
+ name: ir_133_index_map
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_time:
+ name: vis_04_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_time:
+ name: vis_05_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_time:
+ name: vis_06_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_time:
+ name: vis_08_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_time:
+ name: vis_09_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_time:
+ name: nir_13_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_time:
+ name: nir_16_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_time:
+ name: nir_22_time
+ units: s
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_time:
+ name: ir_38_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_time:
+ name: wv_63_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_time:
+ name: wv_73_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_time:
+ name: ir_87_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_time:
+ name: ir_97_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_time:
+ name: ir_105_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_time:
+ name: ir_123_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_time:
+ name: ir_133_time
+ units: s
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_swath_direction:
+ name: vis_04_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_swath_direction:
+ name: vis_05_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_swath_direction:
+ name: vis_06_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_swath_direction:
+ name: vis_08_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_swath_direction:
+ name: vis_09_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_swath_direction:
+ name: nir_13_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_swath_direction:
+ name: nir_16_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_swath_direction:
+ name: nir_22_swath_direction
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_swath_direction:
+ name: ir_38_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_swath_direction:
+ name: wv_63_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_swath_direction:
+ name: wv_73_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_swath_direction:
+ name: ir_87_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_swath_direction:
+ name: ir_97_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_swath_direction:
+ name: ir_105_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_swath_direction:
+ name: ir_123_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_swath_direction:
+ name: ir_133_swath_direction
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_swath_number:
+ name: vis_04_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_swath_number:
+ name: vis_05_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_swath_number:
+ name: vis_06_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_swath_number:
+ name: vis_08_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_swath_number:
+ name: vis_09_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_swath_number:
+ name: nir_13_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_swath_number:
+ name: nir_16_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_swath_number:
+ name: nir_22_swath_number
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_swath_number:
+ name: ir_38_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_swath_number:
+ name: wv_63_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_swath_number:
+ name: wv_73_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_swath_number:
+ name: ir_87_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_swath_number:
+ name: ir_97_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_swath_number:
+ name: ir_105_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_swath_number:
+ name: ir_123_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_swath_number:
+ name: ir_133_swath_number
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_subsatellite_latitude:
+ name: vis_04_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_subsatellite_latitude:
+ name: vis_05_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_subsatellite_latitude:
+ name: vis_06_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_subsatellite_latitude:
+ name: vis_08_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_subsatellite_latitude:
+ name: vis_09_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_subsatellite_latitude:
+ name: nir_13_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_subsatellite_latitude:
+ name: nir_16_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_subsatellite_latitude:
+ name: nir_22_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_subsatellite_latitude:
+ name: ir_38_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_subsatellite_latitude:
+ name: wv_63_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_subsatellite_latitude:
+ name: wv_73_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_subsatellite_latitude:
+ name: ir_87_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_subsatellite_latitude:
+ name: ir_97_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_subsatellite_latitude:
+ name: ir_105_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_subsatellite_latitude:
+ name: ir_123_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_subsatellite_latitude:
+ name: ir_133_subsatellite_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_subsatellite_longitude:
+ name: vis_04_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_subsatellite_longitude:
+ name: vis_05_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_subsatellite_longitude:
+ name: vis_06_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_subsatellite_longitude:
+ name: vis_08_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_subsatellite_longitude:
+ name: vis_09_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_subsatellite_longitude:
+ name: nir_13_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_subsatellite_longitude:
+ name: nir_16_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_subsatellite_longitude:
+ name: nir_22_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_subsatellite_longitude:
+ name: ir_38_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_subsatellite_longitude:
+ name: wv_63_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_subsatellite_longitude:
+ name: wv_73_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_subsatellite_longitude:
+ name: ir_87_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_subsatellite_longitude:
+ name: ir_97_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_subsatellite_longitude:
+ name: ir_105_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_subsatellite_longitude:
+ name: ir_123_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_subsatellite_longitude:
+ name: ir_133_subsatellite_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ vis_04_subsolar_latitude:
+ name: vis_04_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_subsolar_latitude:
+ name: vis_05_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_subsolar_latitude:
+ name: vis_06_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_subsolar_latitude:
+ name: vis_08_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_subsolar_latitude:
+ name: vis_09_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_subsolar_latitude:
+ name: nir_13_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_subsolar_latitude:
+ name: nir_16_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_subsolar_latitude:
+ name: nir_22_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_subsolar_latitude:
+ name: ir_38_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_subsolar_latitude:
+ name: wv_63_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_subsolar_latitude:
+ name: wv_73_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_subsolar_latitude:
+ name: ir_87_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_subsolar_latitude:
+ name: ir_97_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_subsolar_latitude:
+ name: ir_105_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_subsolar_latitude:
+ name: ir_123_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_subsolar_latitude:
+ name: ir_133_subsolar_latitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
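+ # Sub-solar longitude, one ancillary dataset per channel (deg)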
+ vis_04_subsolar_longitude:
+ name: vis_04_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_subsolar_longitude:
+ name: vis_05_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_subsolar_longitude:
+ name: vis_06_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_subsolar_longitude:
+ name: vis_08_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_subsolar_longitude:
+ name: vis_09_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_subsolar_longitude:
+ name: nir_13_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_subsolar_longitude:
+ name: nir_16_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_subsolar_longitude:
+ name: nir_22_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_subsolar_longitude:
+ name: ir_38_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_subsolar_longitude:
+ name: wv_63_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_subsolar_longitude:
+ name: wv_73_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_subsolar_longitude:
+ name: ir_87_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_subsolar_longitude:
+ name: ir_97_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_subsolar_longitude:
+ name: ir_105_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_subsolar_longitude:
+ name: ir_123_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_subsolar_longitude:
+ name: ir_133_subsolar_longitude
+ units: deg
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
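+ # Platform altitude, one ancillary dataset per channel (m)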
+ vis_04_platform_altitude:
+ name: vis_04_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_platform_altitude:
+ name: vis_05_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_platform_altitude:
+ name: vis_06_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_platform_altitude:
+ name: vis_08_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_platform_altitude:
+ name: vis_09_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_platform_altitude:
+ name: nir_13_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_platform_altitude:
+ name: nir_16_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_platform_altitude:
+ name: nir_22_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_platform_altitude:
+ name: ir_38_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_platform_altitude:
+ name: wv_63_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_platform_altitude:
+ name: wv_73_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_platform_altitude:
+ name: ir_87_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_platform_altitude:
+ name: ir_97_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_platform_altitude:
+ name: ir_105_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_platform_altitude:
+ name: ir_123_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_platform_altitude:
+ name: ir_133_platform_altitude
+ units: m
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
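+ # Earth-Sun distance, one ancillary dataset per channel (km)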
+ vis_04_earth_sun_distance:
+ name: vis_04_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_earth_sun_distance:
+ name: vis_05_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_earth_sun_distance:
+ name: vis_06_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_earth_sun_distance:
+ name: vis_08_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_earth_sun_distance:
+ name: vis_09_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_earth_sun_distance:
+ name: nir_13_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_earth_sun_distance:
+ name: nir_16_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_earth_sun_distance:
+ name: nir_22_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_earth_sun_distance:
+ name: ir_38_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_earth_sun_distance:
+ name: wv_63_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_earth_sun_distance:
+ name: wv_73_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_earth_sun_distance:
+ name: ir_87_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_earth_sun_distance:
+ name: ir_97_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_earth_sun_distance:
+ name: ir_105_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_earth_sun_distance:
+ name: ir_123_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_earth_sun_distance:
+ name: ir_133_earth_sun_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
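+ # Sun-satellite distance, one ancillary dataset per channel (km)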
+ vis_04_sun_satellite_distance:
+ name: vis_04_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_05_sun_satellite_distance:
+ name: vis_05_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_06_sun_satellite_distance:
+ name: vis_06_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_08_sun_satellite_distance:
+ name: vis_08_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ vis_09_sun_satellite_distance:
+ name: vis_09_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_13_sun_satellite_distance:
+ name: nir_13_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_16_sun_satellite_distance:
+ name: nir_16_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ nir_22_sun_satellite_distance:
+ name: nir_22_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 1000
+ file_type: fci_l1c_fdhsi
+
+ ir_38_sun_satellite_distance:
+ name: ir_38_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_63_sun_satellite_distance:
+ name: wv_63_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ wv_73_sun_satellite_distance:
+ name: wv_73_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_87_sun_satellite_distance:
+ name: ir_87_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_97_sun_satellite_distance:
+ name: ir_97_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_105_sun_satellite_distance:
+ name: ir_105_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_123_sun_satellite_distance:
+ name: ir_123_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
+
+ ir_133_sun_satellite_distance:
+ name: ir_133_sun_satellite_distance
+ units: km
+ sensor: fci
+ resolution: 2000
+ file_type: fci_l1c_fdhsi
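+
+# Usage sketch (hypothetical path; reader name assumed to be fci_l1c_nc):
+#   from satpy import Scene
+#   scn = Scene(filenames=["/path/to/FCI_L1C_file.nc"], reader="fci_l1c_nc")
+#   scn.load(["ir_105_earth_sun_distance"])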
diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml
index ed7e8a9325..20d9935682 100644
--- a/satpy/etc/readers/fci_l2_nc.yaml
+++ b/satpy/etc/readers/fci_l2_nc.yaml
@@ -1,1050 +1,2736 @@
reader:
name: fci_l2_nc
short_name: FCI L2 NetCDF4
- long_name: MTG FCI L2 (NetCDF4)
+ long_name: MTG FCI L2 data in netCDF4 format
description: Reader for EUMETSAT MTG FCI L2 files in NetCDF4 format.
+ status: Alpha
+ supports_fsspec: false
sensors: [fci]
- reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
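+ # GEOFlippableFileYAMLReader allows the image orientation to be flipped at
+ # load time, e.g. scn.load([...], upper_right_corner="NE")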
file_types:
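+ # The patterns below follow the EUMETSAT MTG file naming convention;
+ # fields in braces are trollsift parse tokens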
- # Filename examples
- # FCI_SIM_OCA_2L_2KM_{creation_time:%Y%m%d}_1700.nc
- # W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-2-ASR--FD------NC4E_C_EUMT_20201105031219_L2PF_DEV_20170410171000_20170410172000_N__T_0104_0000.nc
+ nc_fci_clm:
+ file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
- nc_fci_oca:
+ nc_fci_ct:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
- file_patterns: ['FCI_SIM_OCA_2L_2KM_{creation_time:%Y%m%d}_1700.nc',
- 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-OCA--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc']
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CT-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
- nc_fci_clm:
+ nc_fci_ctth:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
- file_patterns: ['FCI_SIM_CLM_2KM_{creation_time:%Y%m%d}_1700.nc',
- 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CLM--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc']
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CTTH-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
- nc_fci_test_clm:
+ nc_fci_oca:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
- file_patterns: [ 'W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CLMTest-{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc' ]
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-OCA-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
- nc_fci_ct:
+ nc_fci_fir:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
- file_patterns: ['W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CT--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc']
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-FIR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
- nc_fci_cloud:
+ nc_fci_olr:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
- file_patterns: ['W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-CTTH--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc' ]
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-OLR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
- nc_fci_asr:
- file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler
- file_patterns: [ "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-ASR--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc"]
+ nc_fci_crm:
+ file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CRM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
nc_fci_gii:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler
- file_patterns: ["W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-GII--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc"]
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-GII-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
nc_fci_toz:
file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler
- file_patterns: [ "W_XX-EUMETSAT-{reception_location},{instrument},{long_platform_id}+{processing_location}-{level}-TOZ--{temp_str}_C_EUMT_{creation_time:%Y%m%d%H%M%S}_L2PF_{env}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_N__T_{rep_cycle_in_day}_{rep_cycle_count}.nc" ]
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-TOZ-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
+
+ nc_fci_test_clm:
+ file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCFileHandler
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-PAD-CLMTest-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
+
+ nc_fci_asr:
+ file_reader: !!python/name:satpy.readers.fci_l2_nc.FciL2NCSegmentFileHandler
+ file_patterns:
+ - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-ASR-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.nc'
datasets:
+ # CLM
+ cloud_state:
+ name: cloud_state
+ resolution: 2000
+ file_type: nc_fci_clm
+ file_key: cloud_state
+ long_name: cloud_mask_classification
+
+ quality_illumination_clm:
+ name: quality_illumination_clm
+ resolution: 2000
+ file_type: nc_fci_clm
+ file_key: quality_illumination
+ long_name: illumination_classification
+
+ quality_nwp_parameters_clm:
+ name: quality_nwp_parameters_clm
+ resolution: 2000
+ file_type: nc_fci_clm
+ file_key: quality_nwp_parameters
+ long_name: quality_index
+
+ quality_MTG_parameters_clm:
+ name: quality_MTG_parameters_clm
+ resolution: 2000
+ file_type: nc_fci_clm
+ file_key: quality_MTG_parameters
+ long_name: quality_index
+ fill_value: -127
+
+ quality_overall_processing_clm:
+ name: quality_overall_processing_clm
+ resolution: 2000
+ file_type: nc_fci_clm
+ file_key: quality_overall_processing
+ long_name: quality_index
+
+ product_quality_clm:
+ name: product_quality_clm
+ file_type: nc_fci_clm
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_clm:
+ name: product_completeness_clm
+ file_type: nc_fci_clm
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_clm:
+ name: product_timeliness_clm
+ file_type: nc_fci_clm
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # FCI CT L2
+ cloud_phase:
+ name: cloud_phase
+ resolution: 2000
+ file_type: nc_fci_ct
+ file_key: cloud_phase
+ long_name: cloud_phase
+
+ cloud_type:
+ name: cloud_type
+ resolution: 2000
+ file_type: nc_fci_ct
+ file_key: cloud_type
+ long_name: cloud_type
+
+ quality_illumination_ct:
+ name: quality_illumination_ct
+ resolution: 2000
+ file_type: nc_fci_ct
+ file_key: quality_illumination
+ long_name: illumination_classification
+
+ quality_nwp_parameters_ct:
+ name: quality_nwp_parameters_ct
+ resolution: 2000
+ file_type: nc_fci_ct
+ file_key: quality_nwp_parameters
+ long_name: quality_index
+
+ quality_MTG_parameters_ct:
+ name: quality_MTG_parameters_ct
+ resolution: 2000
+ file_type: nc_fci_ct
+ file_key: quality_MTG_parameters
+ long_name: quality_index
+
+ quality_overall_processing_ct:
+ name: quality_overall_processing_ct
+ resolution: 2000
+ file_type: nc_fci_ct
+ file_key: quality_overall_processing
+ long_name: quality_index
+
+ product_quality_ct:
+ name: product_quality_ct
+ file_type: nc_fci_ct
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_ct:
+ name: product_completeness_ct
+ file_type: nc_fci_ct
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_ct:
+ name: product_timeliness_ct
+ file_type: nc_fci_ct
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # FCI CTTH Product
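+ # The fill_value entries below appear to be the variable fill value after
+ # scaling (raw 32769 with each variable's scale factor applied)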
+ cloud_top_aviation_height:
+ name: cloud_top_aviation_height
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: cloud_top_aviation_height
+
+ cloud_top_height:
+ name: cloud_top_height
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: cloud_top_height
+ fill_value: 32769
+
+ cloud_top_pressure:
+ name: cloud_top_pressure
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: cloud_top_pressure
+ fill_value: 3276.9001
+
+ cloud_top_temperature:
+ name: cloud_top_temperature
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: cloud_top_temperature
+ fill_value: 327.69
+
+ effective_cloudiness:
+ name: effective_cloudiness
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: effective_cloudiness
+
+ quality_status_ctth:
+ name: quality_status_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_status
+
+ quality_rtm_ctth:
+ name: quality_rtm_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_rtm
+
+ quality_method_ctth:
+ name: quality_method_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_method
+
+ quality_nwp_parameters_ctth:
+ name: quality_nwp_parameters_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_nwp_parameters
+
+ quality_MTG_parameters_ctth:
+ name: quality_MTG_parameters_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_MTG_parameters
+ fill_value: -127
+
+ quality_overall_processing_ctth:
+ name: quality_overall_processing_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_overall_processing
+
+ quality_overall_processing_aviation_ctth:
+ name: quality_overall_processing_aviation_ctth
+ resolution: 2000
+ file_type: nc_fci_ctth
+ file_key: quality_overall_processing_aviation
+
+ product_quality_ctth:
+ name: product_quality_ctth
+ file_type: nc_fci_ctth
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_ctth:
+ name: product_completeness_ctth
+ file_type: nc_fci_ctth
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_ctth:
+ name: product_timeliness_ctth
+ file_type: nc_fci_ctth
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # OCA
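+ # For two-layer OCA variables, layer: 0 selects the upper and layer: 1 the
+ # lower cloud layer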
retrieved_cloud_phase:
name: retrieved_cloud_phase
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_phase
standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top
- fill_value: -999.
+
+ retrieved_cloud_optical_thickness:
+ name: retrieved_cloud_optical_thickness
+ resolution: 2000
+ file_type: nc_fci_oca
+ file_key: retrieved_cloud_optical_thickness
+ long_name: cloud_optical_depth
retrieved_cloud_optical_thickness_upper_layer:
name: retrieved_cloud_optical_thickness_upper_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_optical_thickness
layer: 0
- standard_name: cloud_optical_depth
- fill_value: -999.
+ long_name: cloud_optical_depth
retrieved_cloud_optical_thickness_lower_layer:
name: retrieved_cloud_optical_thickness_lower_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_optical_thickness
layer: 1
- standard_name: cloud_optical_depth
- fill_value: -999.
+ long_name: cloud_optical_depth
retrieved_cloud_particle_effective_radius:
name: retrieved_cloud_particle_effective_radius
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_particle_effective_radius
standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top
- fill_value: -999.
retrieved_cloud_top_temperature:
name: retrieved_cloud_top_temperature
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_top_temperature
standard_name: air_temperature_at_cloud_top
- fill_value: -999.
retrieved_cloud_top_pressure_upper_layer:
name: retrieved_cloud_top_pressure_upper_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_top_pressure
layer: 0
standard_name: air_pressure_at_cloud_top
- fill_value: -999.
retrieved_cloud_top_pressure_lower_layer:
name: retrieved_cloud_top_pressure_lower_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_top_pressure
layer: 1
standard_name: air_pressure_at_cloud_top
- fill_value: -999.
retrieved_cloud_top_height:
name: retrieved_cloud_top_height
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieved_cloud_top_height
standard_name: height_at_cloud_top
- fill_value: -999.
retrieval_error_cloud_optical_thickness_upper_layer:
name: retrieval_error_cloud_optical_thickness_upper_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieval_error_cloud_optical_thickness
layer: 0
- standard_name: cloud_optical_depth
- fill_value: -999.
+ long_name: cloud_optical_depth
retrieval_error_cloud_optical_thickness_lower_layer:
name: retrieval_error_cloud_optical_thickness_lower_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieval_error_cloud_optical_thickness
layer: 1
- standard_name: cloud_optical_depth
- fill_value: -999.
+ long_name: cloud_optical_depth
retrieval_error_cloud_particle_effective_radius:
name: retrieval_error_cloud_particle_effective_radius
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieval_error_cloud_particle_effective_radius
- standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top
- fill_value: -999.
+ standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top_standard_error
retrieval_error_cloud_top_pressure_upper_layer:
name: retrieval_error_cloud_top_pressure_upper_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieval_error_cloud_top_pressure
layer: 0
- standard_name: air_pressure_at_cloud_top
- fill_value: -999.
+ standard_name: air_pressure_at_cloud_top_standard_error
retrieval_error_cloud_top_pressure_lower_layer:
name: retrieval_error_cloud_top_pressure_lower_layer
+ resolution: 2000
file_type: nc_fci_oca
file_key: retrieval_error_cloud_top_pressure
layer: 1
- standard_name: air_pressure_at_cloud_top
- fill_value: -999.
+ standard_name: air_pressure_at_cloud_top_standard_error
quality_jmeas:
name: quality_jmeas
+ resolution: 2000
file_type: nc_fci_oca
file_key: quality_jmeas
- standard_name: cost_function
- fill_value: -999.
-
- cloud_state:
- name: cloud_state
- file_type: nc_fci_clm
- file_key: cloud_state
- standard_name: cloud_mask_classification
- fill_value: -999
- mask_value: 0
+ long_name: cost_function
- quality_illumination:
- name: quality_illumination
- file_type: nc_fci_clm
- file_key: quality_illumination
- standard_name: illumination_classification
- fill_value: -999
- mask_value: 0
+ product_quality_oca:
+ name: product_quality_oca
+ file_type: nc_fci_oca
+ file_key: product_quality
+ long_name: product_quality_index
- quality_nwp_parameters:
- name: quality_nwp_parameters
- file_type: nc_fci_clm
- file_key: quality_nwp_parameters
- standard_name: quality_index
- fill_value: -999
- mask_value: 0
+ product_completeness_oca:
+ name: product_completeness_oca
+ file_type: nc_fci_oca
+ file_key: product_completeness
+ long_name: product_completeness_index
- quality_MTG_parameters:
- name: quality_MTG_parameters
- file_type: nc_fci_clm
- file_key: quality_MTG_parameters
- standard_name: quality_index
- fill_value: -999
- mask_value: 0
+ product_timeliness_oca:
+ name: product_timeliness_oca
+ file_type: nc_fci_oca
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # FIR
+ fire_probability:
+ name: fire_probability
+ resolution: 2000
+ file_type: nc_fci_fir
+ file_key: fire_probability
+
+ fire_result:
+ name: fire_result
+ resolution: 2000
+ file_type: nc_fci_fir
+ file_key: fire_result
+
+ product_quality_fir:
+ name: product_quality_fir
+ file_type: nc_fci_fir
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_fir:
+ name: product_completeness_fir
+ file_type: nc_fci_fir
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_fir:
+ name: product_timeliness_fir
+ file_type: nc_fci_fir
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # OLR
+ olr:
+ name: olr
+ resolution: 2000
+ file_type: nc_fci_olr
+ file_key: olr_value
+ long_name: outgoing_longwave_radiation
+
+ cloud_type_olr:
+ name: cloud_type_olr
+ resolution: 2000
+ file_type: nc_fci_olr
+ file_key: cloud_type
+ long_name: cloud_type_olr
- quality_overall_processing:
- name: quality_overall_processing
- file_type: nc_fci_clm
+ quality_overall_processing_olr:
+ name: quality_overall_processing_olr
+ resolution: 2000
+ file_type: nc_fci_olr
file_key: quality_overall_processing
- standard_name: quality_index
- fill_value: -999
- mask_value: 0
+ long_name: quality_index
+
+ product_quality_olr:
+ name: product_quality_olr
+ file_type: nc_fci_olr
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_olr:
+ name: product_completeness_olr
+ file_type: nc_fci_olr
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_olr:
+ name: product_timeliness_olr
+ file_type: nc_fci_olr
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # CRM
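+ # vis_channel_id selects the channel slice of the CRM variable
+ # (0 = vis_04 through 7 = nir_22, as in the per-channel entries below)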
+ crm:
+ name: crm
+ resolution: 1000
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance
+
+ crm_vis04:
+ name: crm_vis04
+ resolution: 1000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_vis04
+ vis_channel_id: 0
+
+ crm_vis05:
+ name: crm_vis05
+ resolution: 1000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_vis05
+ vis_channel_id: 1
+
+ crm_vis06:
+ name: crm_vis06
+ resolution: 1000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_vis06
+ vis_channel_id: 2
+
+ crm_vis08:
+ name: crm_vis08
+ resolution: 1000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_vis08
+ vis_channel_id: 3
+
+ crm_vis09:
+ name: crm_vis09
+ resolution: 1000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_vis09
+ vis_channel_id: 4
+
+ crm_nir13:
+ name: crm_nir13
+ resolution: 1000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_nir13
+ vis_channel_id: 5
+
+ crm_nir16:
+ name: crm_nir16
+ resolution: 1000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_nir16
+ vis_channel_id: 6
+
+ crm_nir22:
+ name: crm_nir22
+ resolution: 1000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_crm
+ file_key: mean_clear_sky_reflectance
+ long_name: mean_clear_sky_reflectance_nir22
+ vis_channel_id: 7
+
+ mean_sza:
+ name: mean_sza
+ resolution: 1000
+ file_type: nc_fci_crm
+ file_key: mean_solar_zenith
+ long_name: mean_solar_zenith_angle
+
+ mean_rel_azi:
+ name: mean_rel_azi
+ resolution: 1000
+ file_type: nc_fci_crm
+ file_key: mean_rel_solar_sat_azimuth
+ long_name: mean_relative_solar_satellite_azimuth_angle
+
+ n_acc:
+ name: n_acc
+ resolution: 1000
+ file_type: nc_fci_crm
+ file_key: number_of_accumulations
+ long_name: number_of_accumulations
+
+ historical_data:
+ name: historical_data
+ resolution: 1000
+ file_type: nc_fci_crm
+ file_key: historical_data
+ long_name: historical_data
+
+ product_quality_crm:
+ name: product_quality_crm
+ file_type: nc_fci_crm
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_crm:
+ name: product_completeness_crm
+ file_type: nc_fci_crm
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_crm:
+ name: product_timeliness_crm
+ file_type: nc_fci_crm
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # LAT/LON FOR SEGMENTED PRODUCTS
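+ # The resolution and file_type lists are assumed to pair by position:
+ # 6000 m for nc_fci_gii and nc_fci_toz, 32000 m for nc_fci_asr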
+ latitude:
+ name: latitude
+ file_key: latitude
+ resolution: [6000, 6000, 32000]
+ file_type: [nc_fci_gii, nc_fci_toz, nc_fci_asr]
+ standard_name: latitude
+ units: degree_north
+
+ longitude:
+ name: longitude
+ file_key: longitude
+ resolution: [6000, 6000, 32000]
+ file_type: [nc_fci_gii, nc_fci_toz, nc_fci_asr]
+ standard_name: longitude
+ units: degree_east
-# CLM Test
+
+ # GII
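+ # GII fields are segment products on the 6 km grid, georeferenced via the
+ # latitude/longitude datasets above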
+ k_index:
+ name: k_index
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: k_index
+ long_name: k_index
+ coordinates:
+ - longitude
+ - latitude
+
+ lifted_index:
+ name: lifted_index
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: lifted_index
+ long_name: lifted_index
+ coordinates:
+ - longitude
+ - latitude
+
+ prec_water_high:
+ name: prec_water_high
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: prec_water_high
+ long_name: prec_water_high
+ coordinates:
+ - longitude
+ - latitude
+
+ prec_water_low:
+ name: prec_water_low
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: prec_water_low
+ long_name: prec_water_low
+ coordinates:
+ - longitude
+ - latitude
+
+ prec_water_mid:
+ name: prec_water_mid
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: prec_water_mid
+ long_name: prec_water_mid
+ coordinates:
+ - longitude
+ - latitude
+
+ prec_water_total:
+ name: prec_water_total
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: prec_water_total
+ long_name: prec_water_total
+ coordinates:
+ - longitude
+ - latitude
+
+ percent_cloud_free_gii:
+ name: percent_cloud_free_gii
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: percent_cloud_free
+ long_name: percent_cloud_free
+ coordinates:
+ - longitude
+ - latitude
+
+ number_of_iterations_gii:
+ name: number_of_iterations_gii
+ resolution: 6000
+ file_type: nc_fci_gii
+ file_key: number_of_iterations
+ long_name: number_of_iterations
+ coordinates:
+ - longitude
+ - latitude
+
+ product_quality_gii:
+ name: product_quality_gii
+ file_type: nc_fci_gii
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_gii:
+ name: product_completeness_gii
+ file_type: nc_fci_gii
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_gii:
+ name: product_timeliness_gii
+ file_type: nc_fci_gii
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # TOZ
+ total_ozone:
+ name: total_ozone
+ resolution: 6000
+ file_type: nc_fci_toz
+ file_key: total_ozone
+ long_name: total_ozone
+ coordinates:
+ - longitude
+ - latitude
+
+ percent_pixels_toz:
+ name: percent_pixels_toz
+ resolution: 6000
+ file_type: nc_fci_toz
+ file_key: percent_pixels
+ long_name: percent_pixels
+ coordinates:
+ - longitude
+ - latitude
+
+ number_of_iterations_toz:
+ name: number_of_iterations_toz
+ resolution: 6000
+ file_type: nc_fci_toz
+ file_key: number_of_iterations
+ long_name: number_of_iterations
+ coordinates:
+ - longitude
+ - latitude
+
+ retrieval_type_toz:
+ name: retrieval_type_toz
+ resolution: 6000
+ file_type: nc_fci_toz
+ file_key: retrieval_type
+ long_name: retrieval_type
+ coordinates:
+ - longitude
+ - latitude
+
+ product_quality_toz:
+ name: product_quality_toz
+ file_type: nc_fci_toz
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_toz:
+ name: product_completeness_toz
+ file_type: nc_fci_toz
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_toz:
+ name: product_timeliness_toz
+ file_type: nc_fci_toz
+ file_key: product_timeliness
+ long_name: product_timeliness_index
+
+ # CLM Test
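+ # extract_byte indexes a single test within the packed cloud_mask_test_flag
+ # and cloud_mask_test_result words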
cloud_test_sit1_flag:
name: cloud_test_sit1_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_sit1_flag
+ long_name: cloud_mask_test_sit1_flag
extract_byte: 0
- fill_value: -999
- mask_value: 0
cloud_test_cmt1_flag:
name: cloud_test_cmt1_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt1_flag
+ long_name: cloud_mask_test_cmt1_flag
extract_byte: 1
- fill_value: -999
- mask_value: 0
cloud_test_cmt2_flag:
name: cloud_test_cmt2_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt2_flag
+ long_name: cloud_mask_test_cmt2_flag
extract_byte: 2
- fill_value: -999
- mask_value: 0
cloud_test_cmt3_flag:
name: cloud_test_cmt3_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt3_flag
+ long_name: cloud_mask_test_cmt3_flag
extract_byte: 3
- fill_value: -999
- mask_value: 0
cloud_test_cmt4_flag:
name: cloud_test_cmt4_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt4_flag
+ long_name: cloud_mask_test_cmt4_flag
extract_byte: 4
- fill_value: -999
- mask_value: 0
cloud_test_cmt5_flag:
name: cloud_test_cmt5_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt5_flag
+ long_name: cloud_mask_test_cmt5_flag
extract_byte: 5
- fill_value: -999
- mask_value: 0
cloud_test_cmt6_flag:
name: cloud_test_cmt6_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt6_flag
+ long_name: cloud_mask_test_cmt6_flag
extract_byte: 6
- fill_value: -999
- mask_value: 0
cloud_test_cmt7_flag:
name: cloud_test_cmt7_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt7_flag
+ long_name: cloud_mask_test_cmt7_flag
extract_byte: 7
- fill_value: -999
- mask_value: 0
cloud_test_cmt8_flag:
name: cloud_test_cmt8_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt8_flag
+ long_name: cloud_mask_test_cmt8_flag
extract_byte: 8
- fill_value: -999
- mask_value: 0
cloud_test_cmt9_flag:
name: cloud_test_cmt9_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt9_flag
+ long_name: cloud_mask_test_cmt9_flag
extract_byte: 9
- fill_value: -999
- mask_value: 0
cloud_test_cmt10_flag:
name: cloud_test_cmt10_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt0_flag
+ long_name: cloud_mask_test_cmt10_flag
extract_byte: 10
- fill_value: -999
- mask_value: 0
cloud_test_cmt11_flag:
name: cloud_test_cmt11_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt11_flag
+ long_name: cloud_mask_test_cmt11_flag
extract_byte: 11
- fill_value: -999
- mask_value: 0
cloud_test_cmt12_flag:
name: cloud_test_cmt12_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt12_flag
+ long_name: cloud_mask_test_cmt12_flag
extract_byte: 12
- fill_value: -999
- mask_value: 0
cloud_test_cmt13_flag:
name: cloud_test_cmt13_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt13_flag
+ long_name: cloud_mask_test_cmt13_flag
extract_byte: 13
- fill_value: -999
- mask_value: 0
cloud_test_cmt14_flag:
name: cloud_test_cmt14_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmt14_flag
+ long_name: cloud_mask_test_cmt14_flag
extract_byte: 14
- fill_value: -999
- mask_value: 0
cloud_test_opqt_flag:
name: cloud_test_opqt_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_opqt_flag
+ long_name: cloud_mask_test_opqt_flag
extract_byte: 15
- fill_value: -999
- mask_value: 0
cloud_test_cmrt1_flag:
name: cloud_test_cmrt1_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmrt1_flag
+ long_name: cloud_mask_test_cmrt1_flag
extract_byte: 16
- fill_value: -999
- mask_value: 0
cloud_test_cmrt2_flag:
name: cloud_test_cmrt2_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmrt2_flag
+ long_name: cloud_mask_test_cmrt2_flag
extract_byte: 17
- fill_value: -999
- mask_value: 0
cloud_test_cmrt3_flag:
name: cloud_test_cmrt3_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmrt3_flag
+ long_name: cloud_mask_test_cmrt3_flag
extract_byte: 18
- fill_value: -999
- mask_value: 0
cloud_test_cmrt4_flag:
name: cloud_test_cmrt4_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmrt4_flag
+ long_name: cloud_mask_test_cmrt4_flag
extract_byte: 19
- fill_value: -999
- mask_value: 0
cloud_test_cmrt5_flag:
name: cloud_test_cmrt5_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmrt5_flag
+ long_name: cloud_mask_test_cmrt5_flag
extract_byte: 20
- fill_value: -999
- mask_value: 0
cloud_test_cmrt6_flag:
name: cloud_test_cmrt6_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_cmrt6_flag
+ long_name: cloud_mask_test_cmrt6_flag
extract_byte: 21
- fill_value: -999
- mask_value: 0
cloud_test_dust_flag:
name: cloud_test_dust_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_dust_flag
+ long_name: cloud_mask_test_dust_flag
extract_byte: 22
- fill_value: -999
- mask_value: 0
cloud_test_ash_flag:
name: cloud_test_ash_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_ash_flag
+ long_name: cloud_mask_test_ash_flag
extract_byte: 23
- fill_value: -999
- mask_value: 0
cloud_test_dust_ash_flag:
name: cloud_test_dust_ash_flag
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_flag
- standard_name: cloud_mask_test_dust_ash_flag
+ long_name: cloud_mask_test_dust_ash_flag
extract_byte: 24
- fill_value: -999
- mask_value: 0
cloud_test_sit1:
name: cloud_test_sit1
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_sit1
+ long_name: cloud_mask_test_sit1
extract_byte: 0
- fill_value: -999
- mask_value: 0
cloud_test_cmt1:
name: cloud_test_cmt1
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt1
+ long_name: cloud_mask_test_cmt1
extract_byte: 1
- fill_value: -999
- mask_value: 0
cloud_test_cmt2:
name: cloud_test_cmt2
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt2
+ long_name: cloud_mask_test_cmt2
extract_byte: 2
- fill_value: -999
- mask_value: 0
cloud_test_cmt3:
name: cloud_test_cmt3
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt3
+ long_name: cloud_mask_test_cmt3
extract_byte: 3
- fill_value: -999
- mask_value: 0
cloud_test_cmt4:
name: cloud_test_cmt4
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt4
+ long_name: cloud_mask_test_cmt4
extract_byte: 4
- fill_value: -999
- mask_value: 0
cloud_test_cmt5:
name: cloud_test_cmt5
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt5
+ long_name: cloud_mask_test_cmt5
extract_byte: 5
- fill_value: -999
- mask_value: 0
cloud_test_cmt6:
name: cloud_test_cmt6
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt6
+ long_name: cloud_mask_test_cmt6
extract_byte: 6
- fill_value: -999
- mask_value: 0
cloud_test_cmt7:
name: cloud_test_cmt7
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt7
+ long_name: cloud_mask_test_cmt7
extract_byte: 7
- fill_value: -999
- mask_value: 0
-
cloud_test_cmt8:
name: cloud_test_cmt8
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt8
+ long_name: cloud_mask_test_cmt8
extract_byte: 8
- fill_value: -999
- mask_value: 0
cloud_test_cmt9:
name: cloud_test_cmt9
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt9
+ long_name: cloud_mask_test_cmt9
extract_byte: 9
- fill_value: -999
- mask_value: 0
cloud_test_cmt10:
name: cloud_test_cmt10
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt10
+ long_name: cloud_mask_test_cmt10
extract_byte: 10
- fill_value: -999
- mask_value: 0
cloud_test_cmt11:
name: cloud_test_cmt11
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt11
+ long_name: cloud_mask_test_cmt11
extract_byte: 11
- fill_value: -999
- mask_value: 0
cloud_test_cmt12:
name: cloud_test_cmt12
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt12
+ long_name: cloud_mask_test_cmt12
extract_byte: 12
- fill_value: -999
- mask_value: 0
cloud_test_cmt13:
name: cloud_test_cmt13
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt13
+ long_name: cloud_mask_test_cmt13
extract_byte: 13
- fill_value: -999
- mask_value: 0
cloud_test_cmt14:
name: cloud_test_cmt14
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmt14
+ long_name: cloud_mask_test_cmt14
extract_byte: 14
- fill_value: -999
- mask_value: 0
cloud_test_opqt:
name: cloud_test_opqt
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_opqt
+ long_name: cloud_mask_test_opqt
extract_byte: 15
- fill_value: -999
- mask_value: 0
cloud_test_cmrt1:
name: cloud_test_cmrt1
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmrt1
+ long_name: cloud_mask_test_cmrt1
extract_byte: 16
- fill_value: -999
- mask_value: 0
cloud_test_cmrt2:
name: cloud_test_cmrt2
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmrt2
+ long_name: cloud_mask_test_cmrt2
extract_byte: 17
- fill_value: -999
- mask_value: 0
cloud_test_cmrt3:
name: cloud_test_cmrt3
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmrt3
+ long_name: cloud_mask_test_cmrt3
extract_byte: 18
- fill_value: -999
- mask_value: 0
cloud_test_cmrt4:
name: cloud_test_cmrt4
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmrt4
+ long_name: cloud_mask_test_cmrt4
extract_byte: 19
- fill_value: -999
- mask_value: 0
cloud_test_cmrt5:
name: cloud_test_cmrt5
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmrt5
+ long_name: cloud_mask_test_cmrt5
extract_byte: 20
- fill_value: -999
- mask_value: 0
-
- cloud_test_cmrt6:
- name: cloud_test_cmrt6
- file_type: nc_fci_test_clm
- file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_cmrt6
- extract_byte: 21
- fill_value: -999
- mask_value: 0
cloud_test_dust:
name: cloud_test_dust
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_dust
- extract_byte: 22
- fill_value: -999
- mask_value: 0
+ long_name: cloud_mask_test_dust
+ extract_byte: 21
cloud_test_ash:
name: cloud_test_ash
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_ash
- extract_byte: 23
- fill_value: -999
- mask_value: 0
+ long_name: cloud_mask_test_ash
+ extract_byte: 22
cloud_test_dust_ash:
name: cloud_test_dust_ash
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_test_result
- standard_name: cloud_mask_test_dust_ash
- extract_byte: 24
- fill_value: -999
- mask_value: 0
+ long_name: cloud_mask_test_dust_ash
+ extract_byte: 23
- cloud_mask_cmrt6_result:
- name: cloud_mask_cmrt6_result
+ cloud_test_cmrt6:
+ name: cloud_test_cmrt6
+ resolution: 2000
file_type: nc_fci_test_clm
file_key: cloud_mask_cmrt6_test_result
- standard_name: cloud_mask_cmrt6_result
- extract_byte: 0
-# fill_value: -999
- mask_value: 0
-
- latitude:
- name: latitude
- file_key: 'latitude'
-# resolution:
- file_type: [nc_fci_gii, nc_fci_asr, nc_fci_toz]
- standard_name: latitude
- fill_value: -32767
- mask_value: -32767
- units: degree_north
+ long_name: cloud_mask_cmrt6_result
+ product_quality_clmtest:
+ name: product_quality_clmtest
+ file_type: nc_fci_test_clm
+ file_key: product_quality
+ long_name: product_quality_index
- longitude:
- name: longitude
- file_key: 'longitude'
-# resolution:
- file_type: [nc_fci_gii, nc_fci_asr, nc_fci_toz]
- standard_name: longitude
- fill_value: -32767
- mask_value: -32767
- units: degree_east
+ product_completeness_clmtest:
+ name: product_completeness_clmtest
+ file_type: nc_fci_test_clm
+ file_key: product_completeness
+ long_name: product_completeness_index
+ product_timeliness_clmtest:
+ name: product_timeliness_clmtest
+ file_type: nc_fci_test_clm
+ file_key: product_timeliness
+ long_name: product_timeliness_index
- # GII
- k_index:
- name: k_index
- file_type: nc_fci_gii
- file_key: k_index
- standard_name: k_index
- fill_value: -32767
- mask_value: -32767
- coordinates:
- - longitude
- - latitude
- lifted_index:
- name: lifted_index
- file_type: nc_fci_gii
- file_key: lifted_index
- standard_name: lifted_index
- fill_value: -32767
- mask_value: -32767
+ # ASR
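+ # ASR datasets hold per-segment statistics on the 32 km grid, georeferenced
+ # via the latitude/longitude datasets above; where present, category_id
+ # selects the pixel category of the statistics (0 = all pixels)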
+ bt_max:
+ name: bt_max
+ resolution: 32000
+ file_type: nc_fci_asr
+ file_key: bt_max
+ long_name: maximum_brightness_temperature_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
- percent_cloud_free:
- name: percent_cloud_free
- file_type: nc_fci_gii
- file_key: percent_cloud_free
- standard_name: percent_cloud_free
- fill_value: -127
- mask_value: -127
+ bt_mean:
+ name: bt_mean
+ resolution: 32000
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: mean_brightness_temperature_in_segment
coordinates:
- - longitude
- - latitude
-
- prec_water_high:
- name: prec_water_high
- file_type: nc_fci_gii
- file_key: prec_water_high
- standard_name: prec_water_high
- fill_value: 65535
- mask_value: 65535
- coordinates:
- - longitude
- - latitude
-
- prec_water_low:
- name: prec_water_low
- file_type: nc_fci_gii
- file_key: prec_water_low
- standard_name: prec_water_low
- fill_value: 65535
- mask_value: 65535
- coordinates:
- - longitude
- - latitude
-
- prec_water_mid:
- name: prec_water_mid
- file_type: nc_fci_gii
- file_key: prec_water_mid
- standard_name: prec_water_mid
- fill_value: 65535
- mask_value: 65535
- coordinates:
- - longitude
- - latitude
-
- prec_water_total:
- name: prec_water_total
- file_type: nc_fci_gii
- file_key: prec_water_total
- standard_name: prec_water_total
- fill_value: 65535
- mask_value: 65535
- coordinates:
- - longitude
- - latitude
-
-# FCI CT L2
- cloud_phase:
- name: cloud_phase
- file_type: nc_fci_ct
- file_key: cloud_phase
-# standard_name: cloud_phase
- fill_value: 0
- mask_value: 0
-
- cloud_type:
- name: cloud_type
- file_type: nc_fci_ct
- file_key: cloud_type
-# standard_name: cloud_type
- fill_value: 0
- mask_value: 0
-
-# FCI CTTH Product
- cloud_top_aviation_height:
- name: cloud_top_aviation_height
- file_type: nc_fci_cloud
- file_key: cloud_top_aviation_height
- fill_value: 0
- mask_value: 0
-
- cloud_top_height:
- name: cloud_top_height
- file_type: nc_fci_cloud
- file_key: cloud_top_height
- fill_value: 0
- mask_value: 0
-
- cloud_top_pressure:
- name: cloud_top_pressure
- file_type: nc_fci_cloud
- file_key: cloud_top_pressure
- fill_value: 0
- mask_value: 0
-
- cloud_top_temperature:
- name: cloud_top_temperature
- file_type: nc_fci_cloud
- file_key: cloud_top_temperature
- fill_value: 0
- mask_value: 0
-
- effective_cloudiness:
- name: effective_cloudiness
- file_type: nc_fci_cloud
- file_key: effective_cloudiness
- fill_value: 0
- mask_value: 0
-
-# ASR
- bt_max:
- name: bt_max
- file_type: nc_fci_asr
- file_key: bt_max
- standard_name: bt_max
- fill_value: 65535
- mask_value: 65535
- coordinates:
- - longitude
- - latitude
-
- bt_mean:
- name: bt_mean
- file_type: nc_fci_asr
- file_key: bt_mean
- standard_name: bt_mean
- fill_value: 65535
- mask_value: 65535
- coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
bt_min:
name: bt_min
+ resolution: 32000
file_type: nc_fci_asr
file_key: bt_min
- standard_name: bt_min
- fill_value: 65535
- mask_value: 65535
+ long_name: minimum_brightness_temperature_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
bt_std:
name: bt_std
+ resolution: 32000
file_type: nc_fci_asr
file_key: bt_std
- standard_name: bt_std
- fill_value: 65535
- mask_value: 65535
+ long_name: brightness_temperature_standard_deviation_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
radiance_max:
name: radiance_max
+ resolution: 32000
file_type: nc_fci_asr
file_key: radiance_max
- standard_name: radiance_max
- fill_value: 65535
- mask_value: 65535
+ long_name: maximum_radiance_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
radiance_mean:
name: radiance_mean
+ resolution: 32000
file_type: nc_fci_asr
file_key: radiance_mean
- standard_name: radiance_mean
- fill_value: 65535
- mask_value: 65535
+ long_name: mean_radiance_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
radiance_min:
name: radiance_min
+ resolution: 32000
file_type: nc_fci_asr
file_key: radiance_min
- standard_name: radiance_min
- fill_value: 65535
- mask_value: 65535
+ long_name: minimum_radiance_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
radiance_std:
name: radiance_std
+ resolution: 32000
file_type: nc_fci_asr
file_key: radiance_std
- standard_name: radiance_std
- fill_value: 65535
- mask_value: 65535
+ long_name: radiance_standard_deviation_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
reflectance_max:
name: reflectance_max
+ resolution: 32000
file_type: nc_fci_asr
file_key: reflectance_max
- standard_name: reflectance_max
- fill_value: 65535
- mask_value: 65535
+ long_name: maximum_reflectance_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
reflectance_mean:
name: reflectance_mean
+ resolution: 32000
file_type: nc_fci_asr
file_key: reflectance_mean
- standard_name: reflectance_mean
- fill_value: 65535
- mask_value: 65535
+ long_name: mean_reflectance_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
reflectance_min:
name: reflectance_min
+ resolution: 32000
file_type: nc_fci_asr
file_key: reflectance_min
- standard_name: reflectance_min
- fill_value: 65535
- mask_value: 65535
+ long_name: minimum_reflectance_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
reflectance_std:
name: reflectance_std
+ resolution: 32000
file_type: nc_fci_asr
file_key: reflectance_std
- standard_name: reflectance_std
- fill_value: 65535
- mask_value: 65535
+ long_name: reflectance_standard_deviation_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
quality_bt:
name: quality_bt
+ resolution: 32000
file_type: nc_fci_asr
file_key: quality_bt
- standard_name: quality_bt
- fill_value: 65535
- mask_value: 65535
+ long_name: brightness_temperature_quality
+ fill_value: -1
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
quality_reflectance:
name: quality_reflectance
+ resolution: 32000
file_type: nc_fci_asr
file_key: quality_reflectance
- standard_name: quality_reflectance
- fill_value: 65535
- mask_value: 65535
+ long_name: reflectance_quality
+ fill_value: -1
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
quality_radiance:
name: quality_radiance
+ resolution: 32000
file_type: nc_fci_asr
file_key: quality_radiance
- standard_name: quality_radiance
- fill_value: 65535
- mask_value: 65535
+ long_name: radiance_quality
+ fill_value: -1
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
land_pixel_percent:
name: land_pixel_percent
+ resolution: 32000
file_type: nc_fci_asr
file_key: land_pixel_percent
- standard_name: land_pixel_percent
- fill_value: 65535
- mask_value: 65535
+ long_name: land_pixel_percentage_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
water_pixel_percent:
name: water_pixel_percent
+ resolution: 32000
file_type: nc_fci_asr
file_key: water_pixel_percent
- standard_name: water_pixel_percent
- fill_value: 65535
- mask_value: 65535
+ long_name: water_pixel_percentage_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
pixel_percentage:
name: pixel_percentage
+ resolution: 32000
file_type: nc_fci_asr
file_key: pixel_percentage
- standard_name: pixel_percentage
- fill_value: 65535
- mask_value: 65535
+ long_name: pixel_percentage_used_in_segment
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
- percent_pixels:
- name: percent_pixels
- file_type: nc_fci_toz
- file_key: percent_pixels
- standard_name: percent_pixels
- fill_value: 65535
- mask_value: 65535
+ reflectance_mean_all_vis04:
+ name: reflectance_mean_all_vis04
+ resolution: 32000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 0
+ category_id: 0
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
- number_of_iterations:
- name: number_of_iterations
- file_type: nc_fci_toz
- file_key: number_of_iterations
- standard_name: number_of_iterations
- fill_value: 65535
- mask_value: 65535
+ reflectance_mean_clear_vis04:
+ name: reflectance_mean_clear_vis04
+ resolution: 32000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 0
+ category_id: 1
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
- retrieval_type:
- name: retrieval_type
- file_type: nc_fci_toz
- file_key: retrieval_type
- standard_name: retrieval_type
- fill_value: 65535
- mask_value: 65535
+ reflectance_mean_cloudy_vis04:
+ name: reflectance_mean_cloudy_vis04
+ resolution: 32000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 0
+ category_id: 2
coordinates:
- - longitude
- - latitude
+ - longitude
+ - latitude
- total_ozone:
- name: total_ozone
- file_type: nc_fci_toz
- file_key: total_ozone
- standard_name: total_ozone
- fill_value: 65535
- mask_value: 65535
+ reflectance_mean_all_vis05:
+ name: reflectance_mean_all_vis05
+ resolution: 32000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 1
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_vis05:
+ name: reflectance_mean_clear_vis05
+ resolution: 32000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 1
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_vis05:
+ name: reflectance_mean_cloudy_vis05
+ resolution: 32000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 1
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_all_vis06:
+ name: reflectance_mean_all_vis06
+ resolution: 32000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 2
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_vis06:
+ name: reflectance_mean_clear_vis06
+ resolution: 32000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 2
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_vis06:
+ name: reflectance_mean_cloudy_vis06
+ resolution: 32000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 2
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_all_vis08:
+ name: reflectance_mean_all_vis08
+ resolution: 32000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 3
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_vis08:
+ name: reflectance_mean_clear_vis08
+ resolution: 32000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 3
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_vis08:
+ name: reflectance_mean_cloudy_vis08
+ resolution: 32000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 3
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_all_vis09:
+ name: reflectance_mean_all_vis09
+ resolution: 32000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 4
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_vis09:
+ name: reflectance_mean_clear_vis09
+ resolution: 32000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 4
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_vis09:
+ name: reflectance_mean_cloudy_vis09
+ resolution: 32000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 4
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_all_nir13:
+ name: reflectance_mean_all_nir13
+ resolution: 32000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 5
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_nir13:
+ name: reflectance_mean_clear_nir13
+ resolution: 32000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 5
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_nir13:
+ name: reflectance_mean_cloudy_nir13
+ resolution: 32000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 5
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_all_nir16:
+ name: reflectance_mean_all_nir16
+ resolution: 32000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 6
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_nir16:
+ name: reflectance_mean_clear_nir16
+ resolution: 32000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 6
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_nir16:
+ name: reflectance_mean_cloudy_nir16
+ resolution: 32000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 6
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_all_nir22:
+ name: reflectance_mean_all_nir22
+ resolution: 32000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_all
+ vis_channel_id: 7
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_clear_nir22:
+ name: reflectance_mean_clear_nir22
+ resolution: 32000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_clear
+ vis_channel_id: 7
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ reflectance_mean_cloudy_nir22:
+ name: reflectance_mean_cloudy_nir22
+ resolution: 32000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_asr
+ file_key: reflectance_mean
+ long_name: reflectance_mean_cloudy
+ vis_channel_id: 7
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_ir38:
+ name: bt_mean_all_ir38
+ resolution: 32000
+ wavelength: [3.4, 3.8, 4.2]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 0
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_ir38:
+ name: bt_mean_clear_ir38
+ resolution: 32000
+ wavelength: [3.4, 3.8, 4.2]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 0
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_ir38:
+ name: bt_mean_cloudy_ir38
+ resolution: 32000
+ wavelength: [3.4, 3.8, 4.2]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 0
+ category_id: 2
coordinates:
- - longitude
- - latitude
\ No newline at end of file
+ - longitude
+ - latitude
+
+ bt_mean_all_wv63:
+ name: bt_mean_all_wv63
+ resolution: 32000
+ wavelength: [5.3, 6.3, 7.3]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 1
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_wv63:
+ name: bt_mean_clear_wv63
+ resolution: 32000
+ wavelength: [5.3, 6.3, 7.3]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 1
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_wv63:
+ name: bt_mean_cloudy_wv63
+ resolution: 32000
+ wavelength: [5.3, 6.3, 7.3]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 1
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_wv73:
+ name: bt_mean_all_wv73
+ resolution: 32000
+ wavelength: [6.85, 7.35, 7.85]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 2
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_wv73:
+ name: bt_mean_clear_wv73
+ resolution: 32000
+ wavelength: [6.85, 7.35, 7.85]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 2
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_wv73:
+ name: bt_mean_cloudy_wv73
+ resolution: 32000
+ wavelength: [6.85, 7.35, 7.85]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 2
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_ir87:
+ name: bt_mean_all_ir87
+ resolution: 32000
+ wavelength: [8.3, 8.7, 9.1]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 3
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_ir87:
+ name: bt_mean_clear_ir87
+ resolution: 32000
+ wavelength: [8.3, 8.7, 9.1]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 3
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_ir87:
+ name: bt_mean_cloudy_ir87
+ resolution: 32000
+ wavelength: [8.3, 8.7, 9.1]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 3
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_ir97:
+ name: bt_mean_all_ir97
+ resolution: 32000
+ wavelength: [9.36, 9.66, 9.96]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 4
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_ir97:
+ name: bt_mean_clear_ir97
+ resolution: 32000
+ wavelength: [9.36, 9.66, 9.96]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 4
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_ir97:
+ name: bt_mean_cloudy_ir97
+ resolution: 32000
+ wavelength: [9.36, 9.66, 9.96]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 4
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_ir105:
+ name: bt_mean_all_ir105
+ resolution: 32000
+ wavelength: [9.8, 10.5, 11.2]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 5
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_ir105:
+ name: bt_mean_clear_ir105
+ resolution: 32000
+ wavelength: [9.8, 10.5, 11.2]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 5
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_ir105:
+ name: bt_mean_cloudy_ir105
+ resolution: 32000
+ wavelength: [9.8, 10.5, 11.2]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 5
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_ir123:
+ name: bt_mean_all_ir123
+ resolution: 32000
+ wavelength: [11.8, 12.3, 12.8]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 6
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_ir123:
+ name: bt_mean_clear_ir123
+ resolution: 32000
+ wavelength: [11.8, 12.3, 12.8]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 6
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_ir123:
+ name: bt_mean_cloudy_ir123
+ resolution: 32000
+ wavelength: [11.8, 12.3, 12.8]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 6
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_all_ir133:
+ name: bt_mean_all_ir133
+ resolution: 32000
+ wavelength: [12.7, 13.3, 13.9]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_all
+ ir_channel_id: 7
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_clear_ir133:
+ name: bt_mean_clear_ir133
+ resolution: 32000
+ wavelength: [12.7, 13.3, 13.9]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_clear
+ ir_channel_id: 7
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ bt_mean_cloudy_ir133:
+ name: bt_mean_cloudy_ir133
+ resolution: 32000
+ wavelength: [12.7, 13.3, 13.9]
+ file_type: nc_fci_asr
+ file_key: bt_mean
+ long_name: bt_mean_cloudy
+ ir_channel_id: 7
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_vis04:
+ name: quality_reflectance_all_vis04
+ resolution: 32000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 0
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_vis04:
+ name: quality_reflectance_clear_vis04
+ resolution: 32000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 0
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_vis04:
+ name: quality_reflectance_cloudy_vis04
+ resolution: 32000
+ wavelength: [0.384, 0.444, 0.504]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 0
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_vis05:
+ name: quality_reflectance_all_vis05
+ resolution: 32000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 1
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_vis05:
+ name: quality_reflectance_clear_vis05
+ resolution: 32000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 1
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_vis05:
+ name: quality_reflectance_cloudy_vis05
+ resolution: 32000
+ wavelength: [0.47, 0.51, 0.55]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 1
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_vis06:
+ name: quality_reflectance_all_vis06
+ resolution: 32000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 2
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_vis06:
+ name: quality_reflectance_clear_vis06
+ resolution: 32000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 2
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_vis06:
+ name: quality_reflectance_cloudy_vis06
+ resolution: 32000
+ wavelength: [0.59, 0.64, 0.69]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 2
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_vis08:
+ name: quality_reflectance_all_vis08
+ resolution: 32000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 3
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_vis08:
+ name: quality_reflectance_clear_vis08
+ resolution: 32000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 3
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_vis08:
+ name: quality_reflectance_cloudy_vis08
+ resolution: 32000
+ wavelength: [0.815, 0.865, 0.915]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 3
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_vis09:
+ name: quality_reflectance_all_vis09
+ resolution: 32000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 4
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_vis09:
+ name: quality_reflectance_clear_vis09
+ resolution: 32000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 4
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_vis09:
+ name: quality_reflectance_cloudy_vis09
+ resolution: 32000
+ wavelength: [0.894, 0.914, 0.934]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 4
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_nir13:
+ name: quality_reflectance_all_nir13
+ resolution: 32000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 5
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_nir13:
+ name: quality_reflectance_clear_nir13
+ resolution: 32000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 5
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_nir13:
+ name: quality_reflectance_cloudy_nir13
+ resolution: 32000
+ wavelength: [1.35, 1.38, 1.41]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 5
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_nir16:
+ name: quality_reflectance_all_nir16
+ resolution: 32000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 6
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_nir16:
+ name: quality_reflectance_clear_nir16
+ resolution: 32000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 6
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_nir16:
+ name: quality_reflectance_cloudy_nir16
+ resolution: 32000
+ wavelength: [1.56, 1.61, 1.66]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 6
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_all_nir22:
+ name: quality_reflectance_all_nir22
+ resolution: 32000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_all
+ vis_channel_id: 7
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_clear_nir22:
+ name: quality_reflectance_clear_nir22
+ resolution: 32000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_clear
+ vis_channel_id: 7
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_reflectance_cloudy_nir22:
+ name: quality_reflectance_cloudy_nir22
+ resolution: 32000
+ wavelength: [2.2, 2.25, 2.3]
+ file_type: nc_fci_asr
+ file_key: quality_reflectance
+ long_name: quality_reflectance_cloudy
+ vis_channel_id: 7
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_ir38:
+ name: quality_bt_all_ir38
+ resolution: 32000
+ wavelength: [3.4, 3.8, 4.2]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 0
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_ir38:
+ name: quality_bt_clear_ir38
+ resolution: 32000
+ wavelength: [3.4, 3.8, 4.2]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 0
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_ir38:
+ name: quality_bt_cloudy_ir38
+ resolution: 32000
+ wavelength: [3.4, 3.8, 4.2]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 0
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_wv63:
+ name: quality_bt_all_wv63
+ resolution: 32000
+ wavelength: [5.3, 6.3, 7.3]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 1
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_wv63:
+ name: quality_bt_clear_wv63
+ resolution: 32000
+ wavelength: [5.3, 6.3, 7.3]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 1
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_wv63:
+ name: quality_bt_cloudy_wv63
+ resolution: 32000
+ wavelength: [5.3, 6.3, 7.3]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 1
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_wv73:
+ name: quality_bt_all_wv73
+ resolution: 32000
+ wavelength: [6.85, 7.35, 7.85]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 2
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_wv73:
+ name: quality_bt_clear_wv73
+ resolution: 32000
+ wavelength: [6.85, 7.35, 7.85]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 2
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_wv73:
+ name: quality_bt_cloudy_wv73
+ resolution: 32000
+ wavelength: [6.85, 7.35, 7.85]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 2
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_ir87:
+ name: quality_bt_all_ir87
+ resolution: 32000
+ wavelength: [8.3, 8.7, 9.1]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 3
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_ir87:
+ name: quality_bt_clear_ir87
+ resolution: 32000
+ wavelength: [8.3, 8.7, 9.1]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 3
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_ir87:
+ name: quality_bt_cloudy_ir87
+ resolution: 32000
+ wavelength: [8.3, 8.7, 9.1]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 3
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_ir97:
+ name: quality_bt_all_ir97
+ resolution: 32000
+ wavelength: [9.36, 9.66, 9.96]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 4
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_ir97:
+ name: quality_bt_clear_ir97
+ resolution: 32000
+ wavelength: [9.36, 9.66, 9.96]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 4
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_ir97:
+ name: quality_bt_cloudy_ir97
+ resolution: 32000
+ wavelength: [9.36, 9.66, 9.96]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 4
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_ir105:
+ name: quality_bt_all_ir105
+ resolution: 32000
+ wavelength: [9.8, 10.5, 11.2]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 5
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_ir105:
+ name: quality_bt_clear_ir105
+ resolution: 32000
+ wavelength: [9.8, 10.5, 11.2]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 5
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_ir105:
+ name: quality_bt_cloudy_ir105
+ resolution: 32000
+ wavelength: [9.8, 10.5, 11.2]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 5
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_ir123:
+ name: quality_bt_all_ir123
+ resolution: 32000
+ wavelength: [11.8, 12.3, 12.8]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 6
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_ir123:
+ name: quality_bt_clear_ir123
+ resolution: 32000
+ wavelength: [11.8, 12.3, 12.8]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 6
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_ir123:
+ name: quality_bt_cloudy_ir123
+ resolution: 32000
+ wavelength: [11.8, 12.3, 12.8]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 6
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_all_ir133:
+ name: quality_bt_all_ir133
+ resolution: 32000
+ wavelength: [12.7, 13.3, 13.9]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_all
+ ir_channel_id: 7
+ category_id: 0
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_clear_ir133:
+ name: quality_bt_clear_ir133
+ resolution: 32000
+ wavelength: [12.7, 13.3, 13.9]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_clear
+ ir_channel_id: 7
+ category_id: 1
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ quality_bt_cloudy_ir133:
+ name: quality_bt_cloudy_ir133
+ resolution: 32000
+ wavelength: [12.7, 13.3, 13.9]
+ file_type: nc_fci_asr
+ file_key: quality_bt
+ long_name: quality_bt_cloudy
+ ir_channel_id: 7
+ category_id: 2
+ fill_value: -1
+ coordinates:
+ - longitude
+ - latitude
+
+ pixel_percentage_all:
+ name: pixel_percentage_all
+ resolution: 32000
+ file_type: nc_fci_asr
+ file_key: pixel_percentage
+ long_name: pixel_percentage_all
+ category_id: 0
+ coordinates:
+ - longitude
+ - latitude
+
+ pixel_percentage_clear:
+ name: pixel_percentage_clear
+ resolution: 32000
+ file_type: nc_fci_asr
+ file_key: pixel_percentage
+ long_name: pixel_percentage_clear
+ category_id: 1
+ coordinates:
+ - longitude
+ - latitude
+
+ pixel_percentage_cloudy:
+ name: pixel_percentage_cloudy
+ resolution: 32000
+ file_type: nc_fci_asr
+ file_key: pixel_percentage
+ long_name: pixel_percentage_cloudy
+ category_id: 2
+ coordinates:
+ - longitude
+ - latitude
+
+ product_quality_asr:
+ name: product_quality_asr
+ file_type: nc_fci_asr
+ file_key: product_quality
+ long_name: product_quality_index
+
+ product_completeness_asr:
+ name: product_completeness_asr
+ file_type: nc_fci_asr
+ file_key: product_completeness
+ long_name: product_completeness_index
+
+ product_timeliness_asr:
+ name: product_timeliness_asr
+ file_type: nc_fci_asr
+ file_key: product_timeliness
+ long_name: product_timeliness_index
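
A short usage sketch to make the pattern above concrete (illustrative only; the reader name `fci_l2_nc` and the file glob are assumptions, not part of this patch). Each ASR entry maps a Satpy dataset name onto a shared netCDF variable via `file_key`, with `vis_channel_id`/`ir_channel_id` and `category_id` selecting the channel and the all/clear/cloudy slice:

```python
from glob import glob

from satpy import Scene

# Hypothetical ASR product file; adjust the glob to your local naming.
filenames = glob("*FCI*ASR*.nc")
scn = Scene(filenames=filenames, reader="fci_l2_nc")  # assumed reader name

# One YAML entry per (channel, category) pair: here the mean 10.5 um
# brightness temperature over cloudy pixels in each 32 km segment.
scn.load(["bt_mean_cloudy_ir105"])
print(scn["bt_mean_cloudy_ir105"].attrs.get("long_name"))  # bt_mean_cloudy
```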
diff --git a/satpy/etc/readers/generic_image.yaml b/satpy/etc/readers/generic_image.yaml
index 5da1bb3ff1..07d1bdeb50 100644
--- a/satpy/etc/readers/generic_image.yaml
+++ b/satpy/etc/readers/generic_image.yaml
@@ -1,6 +1,10 @@
reader:
name: generic_image
+ short_name: Generic Image
+ long_name: Generic Images e.g. GeoTIFF
description: generic image reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [images]
default_channels: [image]
diff --git a/satpy/etc/readers/geocat.yaml b/satpy/etc/readers/geocat.yaml
index 66e80b5711..81660da1bd 100644
--- a/satpy/etc/readers/geocat.yaml
+++ b/satpy/etc/readers/geocat.yaml
@@ -1,6 +1,10 @@
reader:
- description: CSPP Geo and GEOCAT file reader
name: geocat
+ short_name: CSPP Geo/GEOCAT
+ long_name: GEOstationary Cloud Algorithm Test-bed
+ description: CSPP Geo and GEOCAT file reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [abi, ahi, goes_imager]
@@ -234,4 +238,4 @@ datasets:
# radiance:
# standard_name: toa_outgoing_radiance_per_unit_wavelength
# units: W m-2 um-1 sr-1
- file_type: ahi_level1
\ No newline at end of file
+ file_type: ahi_level1
diff --git a/satpy/etc/readers/ghi_l1.yaml b/satpy/etc/readers/ghi_l1.yaml
new file mode 100644
index 0000000000..59c8f35f70
--- /dev/null
+++ b/satpy/etc/readers/ghi_l1.yaml
@@ -0,0 +1,185 @@
+# References:
+#   - L1 data of the FY-4B Geostationary High-speed Imager (GHI)
+# - http://fy4.nsmc.org.cn/data/en/data/realtime.html
+
+reader:
+ name: ghi_l1
+  description: FY-4B GHI instrument HDF5 reader
+ sensors: [ghi]
+ default_channels:
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+ ghi_l1_0250m:
+ file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0250M_{version:s}.HDF']
+  ghi_l1_0500m:
+    file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1
+    file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_0500M_{version:s}.HDF']
+  ghi_l1_1000m:
+    file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1
+    file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_1000M_{version:s}.HDF']
+ ghi_l1_2000m:
+ file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_FDI-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
+ ghi_l1_2000m_geo:
+ file_reader: !!python/name:satpy.readers.ghi_l1.HDF_GHI_L1
+ file_patterns: ['{platform_id:4s}-_{instrument:3s}---_N_{observation_type:s}_{longitude:5s}_L1-_GEO-_MULT_{projection:s}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_2000M_{version:s}.HDF']
+
+datasets:
+ C01:
+ name: C01
+ wavelength: [0.45, 0.675, 0.90]
+ resolution:
+ 250: {file_type: ghi_l1_0250m}
+ 500: {file_type: ghi_l1_0500m}
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel01
+ lut_key: CALChannel01
+
+ C02:
+ name: C02
+ wavelength: [0.445, 0.47, 0.495]
+ resolution:
+ 500: {file_type: ghi_l1_0500m}
+ 1000: {file_type: ghi_l1_1000m}
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel02
+ lut_key: CALChannel02
+
+ C03:
+ name: C03
+ wavelength: [0.52, 0.545, 0.57]
+ resolution:
+ 500: {file_type: ghi_l1_0500m}
+ 1000: {file_type: ghi_l1_1000m}
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel03
+ lut_key: CALChannel03
+
+ C04:
+ name: C04
+ wavelength: [0.62, 0.645, 0.67]
+ resolution:
+ 500: {file_type: ghi_l1_0500m}
+ 1000: {file_type: ghi_l1_1000m}
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel04
+ lut_key: CALChannel04
+
+ C05:
+ name: C05
+ wavelength: [1.371, 1.378, 1.386]
+ resolution:
+ 500: {file_type: ghi_l1_0500m}
+ 1000: {file_type: ghi_l1_1000m}
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel05
+ lut_key: CALChannel05
+
+ C06:
+ name: C06
+ wavelength: [1.58, 1.61, 1.64]
+ resolution:
+ 500: {file_type: ghi_l1_0500m}
+ 1000: {file_type: ghi_l1_1000m}
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel06
+ lut_key: CALChannel06
+
+ C07:
+ name: C07
+ wavelength: [10.3, 11.4, 12.5]
+ resolution:
+ 2000: {file_type: ghi_l1_2000m}
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mW/ (m2 cm-1 sr)"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: "K"
+ counts:
+ standard_name: counts
+ units: "1"
+ file_key: NOMChannel07
+ lut_key: CALChannel07
+
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ units: degree
+ standard_name: solar_zenith_angle
+ resolution: 2000
+ file_type: ghi_l1_2000m_geo
+ file_key: NOMSunZenith
+
+ solar_azimuth_angle:
+ name: solar_azimuth_angle
+ units: degree
+ standard_name: solar_azimuth_angle
+ resolution: 2000
+ file_type: ghi_l1_2000m_geo
+ file_key: NOMSunAzimuth
+
+ solar_glint_angle:
+ name: solar_glint_angle
+ units: degree
+ standard_name: solar_glint_angle
+ resolution: 2000
+ file_type: ghi_l1_2000m_geo
+ file_key: NOMSunGlintAngle
+
+ satellite_zenith_angle:
+ name: satellite_zenith_angle
+ units: degree
+ standard_name: satellite_zenith_angle
+ resolution: 2000
+ file_type: ghi_l1_2000m_geo
+ file_key: NOMSatelliteZenith
+
+ satellite_azimuth_angle:
+ name: satellite_azimuth_angle
+ units: degree
+ standard_name: satellite_azimuth_angle
+ resolution: 2000
+ file_type: ghi_l1_2000m_geo
+ file_key: NOMSatelliteAzimuth
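
For readers skimming the resolution mapping above, a minimal sketch of how it is used (the filenames are hypothetical; only the `ghi_l1` reader name and the `resolution`/`calibration` keywords come from Satpy itself):

```python
from glob import glob

from satpy import Scene

# Hypothetical 500 m FDI files matching the ghi_l1_0500m pattern.
scn = Scene(filenames=glob("*GHI*_FDI-_MULT_*_0500M_*.HDF"), reader="ghi_l1")

# C01 is declared at several resolutions; pick the 500 m variant explicitly.
scn.load(["C01"], resolution=500, calibration="reflectance")
```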
diff --git a/satpy/etc/readers/ghrsst_l2.yaml b/satpy/etc/readers/ghrsst_l2.yaml
new file mode 100644
index 0000000000..bdab881dbf
--- /dev/null
+++ b/satpy/etc/readers/ghrsst_l2.yaml
@@ -0,0 +1,101 @@
+reader:
+ name: ghrsst_l2
+ short_name: GHRSST l2
+  long_name: GHRSST Level 2 SST data in netCDF4 format
+ description: NC Reader for GHRSST Level 2 data
+ status: Beta
+ supports_fsspec: false
+ sensors: ['slstr', 'avhrr/3', 'viirs']
+ default_channels: []
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+ GHRSST_OSISAF:
+ file_reader: !!python/name:satpy.readers.ghrsst_l2.GHRSSTL2FileHandler
+ # S-OSI_-FRA_-NPP_-NARSST_FIELD-202010141300Z.nc
+ file_patterns: ['S-OSI_-{generating_centre:4s}-{satid:s}-{field_type:s}_FIELD-{valid_time:%Y%m%d%H%M}Z.nc']
+
+ SLSTR:
+ file_reader: !!python/name:satpy.readers.ghrsst_l2.GHRSSTL2FileHandler
+ file_patterns: ['{dt1:%Y%m%d%H%M%S}-{generating_centre:3s}-{type_id:3s}_GHRSST-SSTskin-SLSTR{something:1s}-{dt2:%Y%m%d%H%M%S}-{version}.nc',
+ '{mission_id:3s}_SL_{processing_level:1s}_WST____{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4s}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3.tar']
+
+datasets:
+ # SLSTR SST and Sea Ice products
+ longitude_slstr:
+ name: longitude_slstr
+ resolution: 1000
+ view: nadir
+ file_type: SLSTR
+ standard_name: lon
+ units: degree
+
+ latitude_slstr:
+ name: latitude_slstr
+ resolution: 1000
+ view: nadir
+ file_type: SLSTR
+ standard_name: lat
+ units: degree
+
+ sea_surface_temperature_slstr:
+ name: sea_surface_temperature
+ sensor: slstr
+ coordinates: [longitude_slstr, latitude_slstr]
+ file_type: SLSTR
+ resolution: 1000
+ view: nadir
+ units: kelvin
+ standard_name: sea_surface_temperature
+
+ sea_ice_fraction_slstr:
+ name: sea_ice_fraction
+ sensor: slstr
+ coordinates: [longitude_slstr, latitude_slstr]
+ file_type: SLSTR
+ resolution: 1000
+ view: nadir
+ units: "%"
+ standard_name: sea_ice_fraction
+
+ # Quality estimation 0-5: no data, cloud, worst, low, acceptable, best
+ quality_level_slstr:
+ name: quality_level
+ sensor: slstr
+ coordinates: [longitude_slstr, latitude_slstr]
+ file_type: SLSTR
+ resolution: 1000
+ view: nadir
+ standard_name: quality_level
+
+
+ # OSISAF SST:
+ longitude_osisaf:
+ name: longitude_osisaf
+ resolution: 2000
+ file_type: GHRSST_OSISAF
+ standard_name: lon
+ units: degree
+
+ latitude_osisaf:
+ name: latitude_osisaf
+ resolution: 2000
+ file_type: GHRSST_OSISAF
+ standard_name: lat
+ units: degree
+
+ sea_surface_temperature_osisaf:
+ name: sea_surface_temperature
+ coordinates: [longitude_osisaf, latitude_osisaf]
+ file_type: GHRSST_OSISAF
+ resolution: 2000
+ units: kelvin
+ standard_name: sea_surface_temperature
+
+ sea_ice_fraction_osisaf:
+ name: sea_ice_fraction
+ coordinates: [longitude_osisaf, latitude_osisaf]
+ file_type: GHRSST_OSISAF
+ resolution: 2000
+ units: "%"
+ standard_name: sea_ice_fraction
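
Because the SLSTR and OSISAF entries intentionally share the dataset name `sea_surface_temperature`, the resolution is what disambiguates them at load time. A minimal sketch (file glob hypothetical):

```python
from glob import glob

from satpy import Scene

# Hypothetical OSISAF file matching the GHRSST_OSISAF pattern above.
scn = Scene(filenames=glob("S-OSI_-FRA_-*-*SST_FIELD-*.nc"), reader="ghrsst_l2")

# Two YAML entries carry this name; resolution=2000 selects the OSISAF one,
# resolution=1000 would select the SLSTR variant.
scn.load(["sea_surface_temperature"], resolution=2000)
```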
diff --git a/satpy/etc/readers/ghrsst_l3c_sst.yaml b/satpy/etc/readers/ghrsst_l3c_sst.yaml
deleted file mode 100644
index fd3ada064f..0000000000
--- a/satpy/etc/readers/ghrsst_l3c_sst.yaml
+++ /dev/null
@@ -1,17 +0,0 @@
-reader:
- description: OSISAF SST GHRSST netCDF reader
- name: ghrsst_l3c_sst
- reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
- sensors: [avhrr/3, viirs]
-
-datasets:
-
- sea_surface_temperature:
- name: sea_surface_temperature
- file_type: ghrsst_osisaf_l2
- resolution: 1000
-
-file_types:
- ghrsst_osisaf_l2:
- file_reader: !!python/name:satpy.readers.ghrsst_l3c_sst.GHRSST_OSISAFL2
- file_patterns: ['S-OSI_-FRA_-{satid:3s}_-NARSST_FIELD-{start_time:%Y%m%d%H00}Z.nc']
diff --git a/satpy/etc/readers/glm_l2.yaml b/satpy/etc/readers/glm_l2.yaml
index 991c7d28f3..8fcd522951 100644
--- a/satpy/etc/readers/glm_l2.yaml
+++ b/satpy/etc/readers/glm_l2.yaml
@@ -6,6 +6,8 @@ reader:
NetCDF4 reader for GOES-R series GLM data. Currently only gridded L2 files
    output from `glmtools <https://github.com/deeplycloudy/glmtools>`_ are
supported.
+ status: Beta
+ supports_fsspec: false
sensors: [glm]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
# file pattern keys to sort files by with 'satpy.utils.group_files'
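
The `group_keys` mentioned in this comment feed `satpy.readers.group_files`, which buckets granules into time-coherent groups that can each back a `Scene`. A minimal sketch (filenames hypothetical):

```python
from glob import glob

from satpy import Scene
from satpy.readers import group_files

# Hypothetical gridded GLM granules produced by glmtools.
files = glob("OR_GLM-L2-GLMC-M3_G16_*.nc")

# Each group maps the reader name to a list of files and can be passed
# straight to Scene(filenames=...).
groups = group_files(files, reader="glm_l2", time_threshold=60)
scenes = [Scene(filenames=group) for group in groups]
```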
diff --git a/satpy/etc/readers/goes-imager_hrit.yaml b/satpy/etc/readers/goes-imager_hrit.yaml
index 9b2e39cc4b..10ea78cc78 100644
--- a/satpy/etc/readers/goes-imager_hrit.yaml
+++ b/satpy/etc/readers/goes-imager_hrit.yaml
@@ -3,6 +3,8 @@ reader:
short_name: GOES Imager HRIT
long_name: GOES Imager Level 1 (HRIT)
description: Reader for GOES Imager Level 1 data in HRIT format
+ status: Nominal
+ supports_fsspec: false
sensors: [goes_imager]
default_channels: [00_7, 03_9, 06_6, 10_7]
reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
diff --git a/satpy/etc/readers/goes-imager_nc.yaml b/satpy/etc/readers/goes-imager_nc.yaml
index 0ce3458079..877f071121 100644
--- a/satpy/etc/readers/goes-imager_nc.yaml
+++ b/satpy/etc/readers/goes-imager_nc.yaml
@@ -9,6 +9,8 @@ reader:
- GOES 8-12: https://goes.gsfc.nasa.gov/text/databook/databook.pdf, page 20 ff.
- GOES 13-15: https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf, chapter 3.
+ status: Beta
+ supports_fsspec: false
sensors: [goes_imager]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/gpm_imerg.yaml b/satpy/etc/readers/gpm_imerg.yaml
index 349b31005c..b8f2c3872d 100644
--- a/satpy/etc/readers/gpm_imerg.yaml
+++ b/satpy/etc/readers/gpm_imerg.yaml
@@ -1,6 +1,10 @@
reader:
- description: HDF5 reader for the GPM/IMERG data
name: gpm_imerg
+ short_name: GPM/IMERG l3
+ long_name: GPM IMERG level 3 precipitation data in HDF5 format
+ description: HDF5 reader for the GPM/IMERG data
+ status: Nominal
+ supports_fsspec: false
sensors: [multiple]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/grib.yaml b/satpy/etc/readers/grib.yaml
index e1aa019591..c53b9dfb24 100644
--- a/satpy/etc/readers/grib.yaml
+++ b/satpy/etc/readers/grib.yaml
@@ -1,6 +1,10 @@
reader:
- description: GRIB2 file reader
name: grib
+ short_name: GRIB2
+ long_name: GRIB2 format
+ description: GRIB2 file reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [unknown]
data_identification_keys:
diff --git a/satpy/etc/readers/hsaf_grib.yaml b/satpy/etc/readers/hsaf_grib.yaml
index 8ade717651..e77fad101c 100644
--- a/satpy/etc/readers/hsaf_grib.yaml
+++ b/satpy/etc/readers/hsaf_grib.yaml
@@ -1,6 +1,10 @@
reader:
- description: Reader for Hydrology SAF products
name: hsaf_grib
+ short_name: Hydrology SAF
+ long_name: Hydrology SAF products in GRIB format
+ description: Reader for Hydrology SAF products
+ status: Beta, only h03, h03b, h05 and h05b currently supported
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [hsaf]
diff --git a/satpy/etc/readers/hy2_scat_l2b_h5.yaml b/satpy/etc/readers/hy2_scat_l2b_h5.yaml
index 85377416d3..3648939b38 100644
--- a/satpy/etc/readers/hy2_scat_l2b_h5.yaml
+++ b/satpy/etc/readers/hy2_scat_l2b_h5.yaml
@@ -1,6 +1,10 @@
reader:
- description: Generic Eumetsat HY2 L2B H5 Wind field Reader
name: hy2_scat_l2b_h5
+ short_name: Eumetsat Wind field
+ long_name: HY-2B Scatterometer level 2b data in HDF5 format from both EUMETSAT and NSOAS
+ description: Generic Eumetsat HY2 L2B H5 Wind field Reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [scatterometer]
default_datasets:
@@ -8,7 +12,9 @@ reader:
file_types:
hy2_scat_l2b_h5:
file_reader: !!python/name:satpy.readers.hy2_scat_l2b_h5.HY2SCATL2BH5FileHandler
- file_patterns: ['W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,{platform_name}+SM_C_EUMP_{start_date:%Y%m%d------}_{orbit_number}_o_250_{product_level}.h5']
+ file_patterns:
+ - 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,{platform_name}+SM_C_EUMP_{start_date:%Y%m%d------}_{orbit_number}_o_250_{product_level}.h5'
+ - '{platform_name}_OPER_SCA_{product_level}_OR_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{orbit_number}_pwp_250_07_owv.h5'
datasets:
wvc_lon:
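
With the second pattern added above, files following either the EUMETSAT or the NSOAS naming convention resolve to the same reader, so discovery code does not need to care which archive the data came from. A minimal sketch (the directory is hypothetical; `wvc_lon` is the dataset declared in this hunk):

```python
from satpy import Scene
from satpy.readers import find_files_and_readers

# Hypothetical local directory holding HY-2B scatterometer granules.
files = find_files_and_readers(base_dir="/data/hy2b", reader="hy2_scat_l2b_h5")
scn = Scene(filenames=files)
scn.load(["wvc_lon"])  # dataset declared in the hunk above
```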
diff --git a/satpy/etc/readers/iasi_l2.yaml b/satpy/etc/readers/iasi_l2.yaml
index b409262c37..22c81b4c7d 100644
--- a/satpy/etc/readers/iasi_l2.yaml
+++ b/satpy/etc/readers/iasi_l2.yaml
@@ -1,6 +1,10 @@
reader:
- description: Reader for IASI L2 files
name: iasi_l2
+ short_name: IASI l2
+ long_name: IASI Level 2 data in HDF5 format
+ description: Reader for IASI L2 files
+ status: Alpha
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [iasi]
default_datasets:
diff --git a/satpy/etc/readers/iasi_l2_so2_bufr.yaml b/satpy/etc/readers/iasi_l2_so2_bufr.yaml
index db8bf61169..111e46d93e 100644
--- a/satpy/etc/readers/iasi_l2_so2_bufr.yaml
+++ b/satpy/etc/readers/iasi_l2_so2_bufr.yaml
@@ -1,9 +1,11 @@
reader:
name: iasi_l2_so2_bufr
short_name: IASI L2 SO2 BUFR
- long_name: METOP IASI Level 2 SO2 BUFR
+ long_name: METOP IASI Level 2 SO2 in BUFR format
description: >
Reader for IASI L2 files
+ status: Beta
+ supports_fsspec: false
sensors: [iasi]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
default_datasets:
@@ -284,10 +286,3 @@ datasets:
coordinates: [longitude, latitude]
key: '#1#brightnessTemperatureRealPart'
fill_value: -1.e+100
-
-
-
-
-
-
-
diff --git a/satpy/etc/readers/ici_l1b_nc.yaml b/satpy/etc/readers/ici_l1b_nc.yaml
new file mode 100644
index 0000000000..cbf6c9f993
--- /dev/null
+++ b/satpy/etc/readers/ici_l1b_nc.yaml
@@ -0,0 +1,671 @@
+reader:
+ name: ici_l1b_nc
+ short_name: ICI L1B RAD NetCDF4
+ long_name: EPS-SG ICI L1B Radiance (NetCDF4)
+ description: >
+ Reader for EUMETSAT EPS-SG Ice Cloud Imager Level 1B Radiance files in NetCDF4.
+ status: Beta
+ sensors: [ici]
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+ data_identification_keys:
+ name:
+ required: true
+ frequency_double_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
+ polarization:
+ enum:
+ - H
+ - V
+ calibration:
+ enum:
+ - brightness_temperature
+ - radiance
+ transitive: true
+
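
A quick sketch of what these identification keys buy at load time (illustrative; the file glob is hypothetical, while the channel names and calibration values are the ones declared in this file):

```python
from glob import glob

from satpy import Scene

# Hypothetical ICI L1B radiance file matching the pattern below.
scn = Scene(filenames=glob("W_XX-EUMETSAT-Darmstadt,SAT,*-ICI-1B-RAD_*.nc"),
            reader="ici_l1b_nc")

# Channel names are plain strings ('1'..'13'); calibration is one of the
# enumerated values declared above.
scn.load(["1"], calibration="brightness_temperature")
```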
+file_types:
+  # EUMETSAT EPS-SG Ice Cloud Imager Level 1B Radiance files in NetCDF4 format
+ nc_ici_l1b_rad:
+ file_reader: !!python/name:satpy.readers.ici_l1b_nc.IciL1bNCFileHandler
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-ICI-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ longitude: data/navigation_data/longitude
+ latitude: data/navigation_data/latitude
+ observation_zenith: data/navigation_data/ici_oza
+ observation_azimuth: data/navigation_data/ici_azimuth
+ solar_zenith: data/navigation_data/ici_solar_zenith_angle
+ solar_azimuth: data/navigation_data/ici_solar_azimuth_angle
+ orthorect: True
+
+datasets:
+
+# --- Coordinates ---
+ lon_pixels_horn_1:
+ name: lon_pixels_horn_1
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 0
+
+ lat_pixels_horn_1:
+ name: lat_pixels_horn_1
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_horns: 0
+
+ lon_pixels_horn_2:
+ name: lon_pixels_horn_2
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 1
+
+ lat_pixels_horn_2:
+ name: lat_pixels_horn_2
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_horns: 1
+
+ lon_pixels_horn_3:
+ name: lon_pixels_horn_3
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 2
+
+ lat_pixels_horn_3:
+ name: lat_pixels_horn_3
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+    n_horns: 2
+
+ lon_pixels_horn_4:
+ name: lon_pixels_horn_4
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 3
+
+ lat_pixels_horn_4:
+ name: lat_pixels_horn_4
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_horns: 3
+
+ lon_pixels_horn_5:
+ name: lon_pixels_horn_5
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 4
+
+ lat_pixels_horn_5:
+ name: lat_pixels_horn_5
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_horns: 4
+
+ lon_pixels_horn_6:
+ name: lon_pixels_horn_6
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 5
+
+ lat_pixels_horn_6:
+ name: lat_pixels_horn_6
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_horns: 5
+
+ lon_pixels_horn_7:
+ name: lon_pixels_horn_7
+ file_type: nc_ici_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_horns: 6
+
+ lat_pixels_horn_7:
+ name: lat_pixels_horn_7
+ file_type: nc_ici_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_horns: 6
+
+ longitude_ssp:
+ name: longitude_ssp
+ file_type: nc_ici_l1b_rad
+ file_key: data/navigation_data/longitude_ssp
+ standard_name: longitude
+
+ latitude_ssp:
+ name: latitude_ssp
+ file_type: nc_ici_l1b_rad
+ file_key: data/navigation_data/latitude_ssp
+ standard_name: latitude
+
+# --- Measurement data ---
+ '1':
+ name: '1'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_183
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+ n_183: 0
+ chan_index: 0
+ frequency_double_sideband:
+ central: 183.31
+ side: 7.0
+ bandwidth: 2.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '2':
+ name: '2'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_183
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+ n_183: 1
+ chan_index: 1
+ frequency_double_sideband:
+ central: 183.31
+ side: 3.4
+ bandwidth: 1.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '3':
+ name: '3'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_183
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+ n_183: 2
+ chan_index: 2
+ frequency_double_sideband:
+ central: 183.31
+ side: 2.0
+ bandwidth: 1.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '4':
+ name: '4'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_243
+ coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
+ n_243: 0
+ chan_index: 3
+ frequency_double_sideband:
+ central: 243.2
+ side: 2.5
+ bandwidth: 3.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '5':
+ name: '5'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_243
+ coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
+ n_243: 1
+ chan_index: 4
+ frequency_double_sideband:
+ central: 243.2
+ side: 2.5
+ bandwidth: 3.0
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '6':
+ name: '6'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_325
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+ n_325: 0
+ chan_index: 5
+ frequency_double_sideband:
+ central: 325.15
+ side: 9.5
+ bandwidth: 3.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '7':
+ name: '7'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_325
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+ n_325: 1
+ chan_index: 6
+ frequency_double_sideband:
+ central: 325.15
+ side: 3.5
+ bandwidth: 2.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '8':
+ name: '8'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_325
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+ n_325: 2
+    chan_index: 7
+ frequency_double_sideband:
+ central: 325.15
+ side: 1.5
+ bandwidth: 1.6
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '9':
+ name: '9'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_448
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+ n_448: 0
+ chan_index: 8
+ frequency_double_sideband:
+ central: 448.0
+ side: 7.2
+ bandwidth: 3.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '10':
+ name: '10'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_448
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+ n_448: 1
+ chan_index: 9
+ frequency_double_sideband:
+ central: 448.0
+ side: 3.0
+ bandwidth: 2.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '11':
+ name: '11'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_448
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+ n_448: 2
+ chan_index: 10
+ frequency_double_sideband:
+ central: 448.0
+ side: 1.4
+ bandwidth: 1.2
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '12':
+ name: '12'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_664
+ coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
+ n_664: 0
+ chan_index: 11
+ frequency_double_sideband:
+ central: 664.0
+ side: 4.2
+ bandwidth: 5.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '13':
+ name: '13'
+ file_type: nc_ici_l1b_rad
+ file_key: data/measurement_data/ici_radiance_664
+ coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
+ n_664: 1
+ chan_index: 12
+ frequency_double_sideband:
+ central: 664.0
+ side: 4.2
+ bandwidth: 5.0
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ # --- Navigation data ---
+ time_start_scan_utc:
+ name: time_start_scan_utc
+ standard_name: time_start_scan_utc
+ file_type: nc_ici_l1b_rad
+ file_key: data/navigation_data/time_start_scan_utc
+ coordinates: [longitude_ssp, latitude_ssp]
+
+ # --- Geometric data ---
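+ # The angle variables hold one layer per horn; 'n_horns' selects the horn
+ # index (0-6), exposing a separate dataset per horn to match the per-horn
+ # lon/lat coordinates used above.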
+ solar_zenith_horn_1:
+ name: solar_zenith_horn_1
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 0
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+
+ solar_zenith_horn_2:
+ name: solar_zenith_horn_2
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 1
+ coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
+
+ solar_zenith_horn_3:
+ name: solar_zenith_horn_3
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 2
+ coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
+
+ solar_zenith_horn_4:
+ name: solar_zenith_horn_4
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 3
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+
+ solar_zenith_horn_5:
+ name: solar_zenith_horn_5
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 4
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+
+ solar_zenith_horn_6:
+ name: solar_zenith_horn_6
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 5
+ coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
+
+ solar_zenith_horn_7:
+ name: solar_zenith_horn_7
+ standard_name: solar_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_zenith
+ n_horns: 6
+ coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
+
+ solar_azimuth_horn_1:
+ name: solar_azimuth_horn_1
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 0
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+
+ solar_azimuth_horn_2:
+ name: solar_azimuth_horn_2
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 1
+ coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
+
+ solar_azimuth_horn_3:
+ name: solar_azimuth_horn_3
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 2
+ coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
+
+ solar_azimuth_horn_4:
+ name: solar_azimuth_horn_4
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 3
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+
+ solar_azimuth_horn_5:
+ name: solar_azimuth_horn_5
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 4
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+
+ solar_azimuth_horn_6:
+ name: solar_azimuth_horn_6
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 5
+ coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
+
+ solar_azimuth_horn_7:
+ name: solar_azimuth_horn_7
+ standard_name: solar_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: solar_azimuth
+ n_horns: 6
+ coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
+
+ observation_zenith_horn_1:
+ name: observation_zenith_horn_1
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 0
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+
+ observation_zenith_horn_2:
+ name: observation_zenith_horn_2
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 1
+ coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
+
+ observation_zenith_horn_3:
+ name: observation_zenith_horn_3
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 2
+ coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
+
+ observation_zenith_horn_4:
+ name: observation_zenith_horn_4
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 3
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+
+ observation_zenith_horn_5:
+ name: observation_zenith_horn_5
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 4
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+
+ observation_zenith_horn_6:
+ name: observation_zenith_horn_6
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 5
+ coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
+
+ observation_zenith_horn_7:
+ name: observation_zenith_horn_7
+ standard_name: sensor_zenith_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_zenith
+ n_horns: 6
+ coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
+
+ observation_azimuth_horn_1:
+ name: observation_azimuth_horn_1
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 0
+ coordinates: [lon_pixels_horn_1, lat_pixels_horn_1]
+
+ observation_azimuth_horn_2:
+ name: observation_azimuth_horn_2
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 1
+ coordinates: [lon_pixels_horn_2, lat_pixels_horn_2]
+
+ observation_azimuth_horn_3:
+ name: observation_azimuth_horn_3
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 2
+ coordinates: [lon_pixels_horn_3, lat_pixels_horn_3]
+
+ observation_azimuth_horn_4:
+ name: observation_azimuth_horn_4
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 3
+ coordinates: [lon_pixels_horn_4, lat_pixels_horn_4]
+
+ observation_azimuth_horn_5:
+ name: observation_azimuth_horn_5
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 4
+ coordinates: [lon_pixels_horn_5, lat_pixels_horn_5]
+
+ observation_azimuth_horn_6:
+ name: observation_azimuth_horn_6
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 5
+ coordinates: [lon_pixels_horn_6, lat_pixels_horn_6]
+
+ observation_azimuth_horn_7:
+ name: observation_azimuth_horn_7
+ standard_name: sensor_azimuth_angle
+ file_type: nc_ici_l1b_rad
+ file_key: observation_azimuth
+ n_horns: 6
+ coordinates: [lon_pixels_horn_7, lat_pixels_horn_7]
diff --git a/satpy/etc/readers/jami_hrit.yaml b/satpy/etc/readers/jami_hrit.yaml
index 663008034e..0480139459 100644
--- a/satpy/etc/readers/jami_hrit.yaml
+++ b/satpy/etc/readers/jami_hrit.yaml
@@ -1,7 +1,7 @@
reader:
name: jami_hrit
short_name: JAMI HRIT
- long_name: MTSAT-1R JAMI Level 1 (HRIT)
+ long_name: MTSAT-1R JAMI Level 1 data in JMA HRIT format
description: >
Reader for MTSAT-1R JAMI data in JMA HRIT format. Note that there
exist two versions of the dataset. A segmented (data split into
@@ -11,6 +11,8 @@ reader:
- https://www.wmo-sat.info/oscar/instruments/view/236
- http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html
+ status: Beta
+ supports_fsspec: false
sensors: [jami]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -22,6 +24,7 @@ file_types:
- 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}_{segment:03d}'
- 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS'
+ - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS.gz'
hrit_ir1:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
@@ -29,6 +32,7 @@ file_types:
- 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}_{segment:03d}'
- 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1'
+ - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1.gz'
hrit_ir2:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
@@ -36,6 +40,7 @@ file_types:
- 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}_{segment:03d}'
- 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2'
+ - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2.gz'
hrit_ir3:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
@@ -43,6 +48,7 @@ file_types:
- 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}_{segment:03d}'
- 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3'
+ - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3.gz'
hrit_ir4:
@@ -51,6 +57,7 @@ file_types:
- 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}_{segment:03d}'
- 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4'
+ - 'HRIT_MTSAT1_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4.gz'
datasets:
VIS:
@@ -121,4 +128,4 @@ datasets:
brightness_temperature:
standard_name: toa_brightness_temperature
units: "K"
- file_type: hrit_ir4
\ No newline at end of file
+ file_type: hrit_ir4
diff --git a/satpy/etc/readers/li_l2.yaml b/satpy/etc/readers/li_l2.yaml
index 0088db19b2..3433edd56c 100644
--- a/satpy/etc/readers/li_l2.yaml
+++ b/satpy/etc/readers/li_l2.yaml
@@ -1,6 +1,10 @@
reader:
- description: Generic MTG LI L2 product reader
name: li_l2
+ short_name: MTG LI l2
+ long_name: MTG (Meteosat 12 and later) Lightning Imager (LI) Level 2 data in netCDF4 format
+ description: Generic MTG LI L2 product reader
+ status: Alpha
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [li]
default_datasets:
@@ -64,4 +68,3 @@ file_types:
li_lfl:
file_reader: !!python/name:satpy.readers.li_l2.LIFileHandler
file_patterns: ['{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+{data_source}-{processing_evel}-LFL-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-{format}_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day}.nc']
-
diff --git a/satpy/etc/readers/maia.yaml b/satpy/etc/readers/maia.yaml
index 06e7f6ae6f..07dfbd9283 100644
--- a/satpy/etc/readers/maia.yaml
+++ b/satpy/etc/readers/maia.yaml
@@ -1,6 +1,10 @@
reader:
- description: MAIA Reader
name: maia
+ short_name: MAIA
+ long_name: AAPP MAIA VIIRS and AVHRR products in HDF5 format
+ description: MAIA Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [viirs, avhrr]
@@ -43,10 +47,10 @@ datasets:
coordinates: [Longitude, Latitude ]
CloudMask:
- name: CloudMask
+ name: CloudMask
file_type: maia
coordinates: [Longitude, Latitude ]
-
+
# CloudType and CloudMask are bitfields
# description of sub fields
ct:
@@ -54,7 +58,7 @@ datasets:
file_type: maia
coordinates: [Longitude, Latitude ]
-# Cloud Mask On Pixel
+# Cloud Mask On Pixel
cma:
name: cma
file_type: maia
@@ -109,7 +113,7 @@ datasets:
units: degrees celcius
file_type: maia
coordinates: [Longitude, Latitude ]
-
+
Sat_zenith:
name: Sat_zenith
units: degrees
@@ -177,4 +181,3 @@ datasets:
file_type: maia
coordinates: [Longitude, Latitude ]
-
diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml
index e841bbdd8b..5c205c2a33 100644
--- a/satpy/etc/readers/mersi2_l1b.yaml
+++ b/satpy/etc/readers/mersi2_l1b.yaml
@@ -1,6 +1,10 @@
reader:
- description: FY-3D Medium Resolution Spectral Imager 2 (MERSI-2) L1B Reader
name: mersi2_l1b
+ short_name: MERSI-2 l1b
+ long_name: MERSI-2 L1B data in HDF5 format
+ description: FY-3D Medium Resolution Spectral Imager 2 (MERSI-2) L1B Reader
+ status: Beta
+ supports_fsspec: false
sensors: [mersi-2]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/mhs_l1c_aapp.yaml b/satpy/etc/readers/mhs_l1c_aapp.yaml
new file mode 100644
index 0000000000..ab2ba082e7
--- /dev/null
+++ b/satpy/etc/readers/mhs_l1c_aapp.yaml
@@ -0,0 +1,169 @@
+reader:
+ name: mhs_l1c_aapp
+ short_name: MHS l1c
+ long_name: MHS Level 1C data in AAPP format
+ description: AAPP l1c Reader for MHS data
+ status: Nominal
+ supports_fsspec: false
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ sensors: [mhs,]
+ default_channels: []
+
+ data_identification_keys:
+ name:
+ required: true
+ frequency_double_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
+ frequency_range:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange
+ resolution:
+ polarization:
+ enum:
+ - H
+ - V
+ calibration:
+ enum:
+ - brightness_temperature
+ transitive: true
+ modifiers:
+ required: true
+ default: []
+ type: !!python/name:satpy.dataset.ModifierTuple
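+ # These keys swap the default wavelength-based identification for the
+ # frequency types above, so a channel can be matched by a frequency that
+ # falls within its range or within either of its sidebands.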
+
+datasets:
+ '1':
+ name: '1'
+ frequency_range:
+ central: 89.
+ bandwidth: 2.8
+ unit: GHz
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ '2':
+ name: '2'
+ frequency_range:
+ central: 157.
+ bandwidth: 2.8
+ unit: GHz
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ '3':
+ name: '3'
+ frequency_double_sideband:
+ unit: GHz
+ central: 183.31
+ side: 1.0
+ bandwidth: 1.0
+ polarization: 'H'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ '4':
+ name: '4'
+ frequency_double_sideband:
+ unit: GHz
+ central: 183.31
+ side: 3.0
+ bandwidth: 2.0
+ polarization: 'H'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ '5':
+ name: '5'
+ frequency_range:
+ unit: GHz
+ central: 190.311
+ bandwidth: 2.0
+ polarization: 'V'
+ resolution: 16000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ standard_name: solar_zenith_angle
+ units: degrees
+
+ solar_azimuth_angle:
+ name: solar_azimuth_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ standard_name: solar_azimuth_angle
+ units: degrees
+
+ sensor_zenith_angle:
+ name: sensor_zenith_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ standard_name: sensor_zenith_angle
+ units: degrees
+
+ sensor_azimuth_angle:
+ name: sensor_azimuth_angle
+ resolution: 16000
+ coordinates:
+ - longitude
+ - latitude
+ file_type: mhs_aapp_l1c
+ standard_name: sensor_azimuth_angle
+ units: degrees
+
+ latitude:
+ name: latitude
+ resolution: 16000
+ file_type: mhs_aapp_l1c
+ standard_name: latitude
+ units: degrees_north
+
+ longitude:
+ name: longitude
+ resolution: 16000
+ file_type: mhs_aapp_l1c
+ standard_name: longitude
+ units: degrees_east
+
+file_types:
+ mhs_aapp_l1c:
+ file_reader: !!python/name:satpy.readers.aapp_mhs_amsub_l1c.MHS_AMSUB_AAPPL1CFile
+ file_patterns: ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']
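+
+# A minimal usage sketch (hypothetical file name matching the pattern above):
+#   from satpy import Scene
+#   scn = Scene(filenames=['mhsl1c_noaa19_20220101_1200_12345.l1c'],
+#               reader='mhs_l1c_aapp')
+#   scn.load(['1'])  # 89 GHz brightness temperature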
diff --git a/satpy/etc/readers/mimicTPW2_comp.yaml b/satpy/etc/readers/mimicTPW2_comp.yaml
index ca1492da94..8b4ad41c7f 100644
--- a/satpy/etc/readers/mimicTPW2_comp.yaml
+++ b/satpy/etc/readers/mimicTPW2_comp.yaml
@@ -1,6 +1,10 @@
reader:
- description: NetCDF4 reader for the MIMIC TPW Version 2.0 product
name: mimicTPW2_comp
+ short_name: MIMIC TPW v2
+ long_name: MIMIC Total Precipitable Water Product Reader in netCDF format
+ description: NetCDF4 reader for the MIMIC TPW Version 2.0 product
+ status: Beta
+ supports_fsspec: false
sensors: [mimic]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/mirs.yaml b/satpy/etc/readers/mirs.yaml
index 558f8a2254..4e70fbed2c 100644
--- a/satpy/etc/readers/mirs.yaml
+++ b/satpy/etc/readers/mirs.yaml
@@ -1,8 +1,10 @@
reader:
- description: NetCDF Reader for the Microwave Integrated Retrieval System Level 2 swath products
name: mirs
short_name: MiRS Level 2 NetCDF4
- long_name: MiRS Level 2 Swath Product Reader (NetCDF4)
+ long_name: MiRS Level 2 Precipitation and Surface Swath Product Reader in netCDF4 format
+ description: NetCDF Reader for the Microwave Integrated Retrieval System Level 2 swath products
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [amsu, amsu-mhs, atms, ssmis, gmi]
data_files:
@@ -25,7 +27,7 @@ file_types:
file_patterns:
- 'IMG_SX.{platform_shortname}.D{start_time:%y%j.S%H%M}.E{end_time:%H%M}.B{num}.WE.HR.ORB.nc'
-datasets:
+datasets:
longitude:
name: longitude
file_type: metop_amsu
diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml
index 10a5c25770..d39ffbb99c 100644
--- a/satpy/etc/readers/modis_l1b.yaml
+++ b/satpy/etc/readers/modis_l1b.yaml
@@ -1,9 +1,13 @@
reader:
- default_datasets: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36]
- description: Generic MODIS HDF-EOS Reader
name: modis_l1b
+ short_name: MODIS l1b
+ long_name: Terra and Aqua MODIS data in EOS-hdf4 level-1 format as produced by IMAPP and IPOPP or downloaded from LAADS
+ description: Generic MODIS HDF-EOS Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [modis]
+ default_datasets: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36]
navigations:
hdf_eos_geo:
@@ -412,11 +416,13 @@ datasets:
# For EUM reduced (thinned) files
file_type: hdf_eos_data_1000m
1000:
- file_type: [hdf_eos_geo, hdf_eos_data_1000m]
+ file_type: [hdf_eos_geo, hdf_eos_data_1000m, hdf_eos_data_500m, hdf_eos_data_250m]
+ # Both 500m and 250m files have 1km resolution Longitude/Latitude
+ # 1km Longitude/Latitude can be interpolated to 500m or 250m resolution
500:
- file_type: hdf_eos_geo
+ file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m]
250:
- file_type: hdf_eos_geo
+ file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m]
standard_name: longitude
units: degree
@@ -427,39 +433,65 @@ datasets:
# For EUM reduced (thinned) files
file_type: hdf_eos_data_1000m
1000:
- file_type: [hdf_eos_geo, hdf_eos_data_1000m]
+ file_type: [hdf_eos_geo, hdf_eos_data_1000m, hdf_eos_data_500m, hdf_eos_data_250m]
+ # Both 500m and 250m files have 1km resolution Longitude/Latitude
+ # 1km Longitude/Latitude can be interpolated to 500m or 250m resolution
500:
- file_type: hdf_eos_geo
+ file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m]
250:
- file_type: hdf_eos_geo
+ file_type: [hdf_eos_geo, hdf_eos_data_500m, hdf_eos_data_250m]
standard_name: latitude
units: degree
solar_zenith_angle:
name: solar_zenith_angle
sensor: modis
- resolution: [1000, 500, 250]
+ resolution:
+ 1000:
+ file_type: [hdf_eos_geo, hdf_eos_data_1000m]
+ 500:
+ file_type: [hdf_eos_geo]
+ 250:
+ file_type: [hdf_eos_geo]
coordinates: [longitude, latitude]
file_type: [hdf_eos_geo, hdf_eos_data_1000m]
solar_azimuth_angle:
name: solar_azimuth_angle
sensor: modis
- resolution: [1000, 500, 250]
+ resolution:
+ 1000:
+ file_type: [hdf_eos_geo, hdf_eos_data_1000m]
+ 500:
+ file_type: [hdf_eos_geo]
+ 250:
+ file_type: [hdf_eos_geo]
coordinates: [longitude, latitude]
file_type: [hdf_eos_geo, hdf_eos_data_1000m]
satellite_zenith_angle:
name: satellite_zenith_angle
sensor: modis
- resolution: [1000, 500, 250]
+ resolution:
+ 1000:
+ file_type: [hdf_eos_geo, hdf_eos_data_1000m]
+ 500:
+ file_type: [hdf_eos_geo]
+ 250:
+ file_type: [hdf_eos_geo]
coordinates: [longitude, latitude]
file_type: [hdf_eos_geo, hdf_eos_data_1000m]
satellite_azimuth_angle:
name: satellite_azimuth_angle
sensor: modis
- resolution: [1000, 500, 250]
+ resolution:
+ 1000:
+ file_type: [hdf_eos_geo, hdf_eos_data_1000m]
+ 500:
+ file_type: [hdf_eos_geo]
+ 250:
+ file_type: [hdf_eos_geo]
coordinates: [longitude, latitude]
file_type: [hdf_eos_geo, hdf_eos_data_1000m]
@@ -472,7 +504,8 @@ file_types:
- 'M{platform_indicator:1s}D02QKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf'
- 'M{platform_indicator:1s}D02QKM.{start_time:%y%j%H%M%S}.hdf'
- '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.250m.hdf'
- file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSBandReader
+ - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_QKM'
+ file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader
hdf_eos_data_500m:
file_patterns:
- 'M{platform_indicator:1s}D02Hkm_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf'
@@ -480,7 +513,8 @@ file_types:
- 'M{platform_indicator:1s}D02HKM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf'
- 'M{platform_indicator:1s}D02HKM.{start_time:%y%j%H%M%S}.hdf'
- '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.500m.hdf'
- file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSBandReader
+ - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_HKM'
+ file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader
hdf_eos_data_1000m:
file_patterns:
- 'M{platform_indicator:1s}D021km_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf'
@@ -489,6 +523,7 @@ file_types:
- 'thin_M{platform_indicator:1s}D021KM.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf'
- 'M{platform_indicator:1s}D021KM.{start_time:%y%j%H%M%S}.hdf'
- '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.1000m.hdf'
+ - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.L1B_LAC'
- 'M{platform_indicator:1s}D021KM_A{start_time:%Y%j_%H%M}_{collection:03d}_NRT.hdf'
file_reader: !!python/name:satpy.readers.modis_l1b.MixedHDFEOSReader
hdf_eos_geo:
@@ -498,4 +533,5 @@ file_types:
- 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf'
- 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf'
- '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf'
+ - '{platform_indicator:1s}{start_time:%Y%j%H%M%S}.GEO'
file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader
diff --git a/satpy/etc/readers/modis_l2.yaml b/satpy/etc/readers/modis_l2.yaml
index 21975c95b0..5e763fe6f9 100644
--- a/satpy/etc/readers/modis_l2.yaml
+++ b/satpy/etc/readers/modis_l2.yaml
@@ -1,7 +1,10 @@
reader:
- default_datasets: [cloud_mask]
- description: MODIS HDF-EOS Cloud Mask Reader
name: modis_l2
+ short_name: MODIS l2
+ long_name: MODIS Level 2 (mod35, mod06 and IMAPP) data in HDF-EOS format
+ description: MODIS HDF-EOS L2 Reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [modis]
@@ -9,10 +12,16 @@ file_types:
mod35_hdf:
file_patterns:
- 'M{platform_indicator:1s}D35_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf'
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod35.hdf'
file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
mod06_hdf:
file_patterns:
- 'M{platform_indicator:1s}D06_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf'
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ mod06ct_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06ct.hdf'
file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
hdf_eos_geo:
file_patterns:
@@ -22,13 +31,50 @@ file_types:
- 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf'
- '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf'
file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader
+ icecon_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.icecon.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ inversion_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.inversion.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ ist_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ist.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ mask_byte1_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mask_byte1.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ mod07_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod07.hdf'
+ - 'M{platform_indicator:1s}D07_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ mod28_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod28.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ modlst_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.modlst.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ ndvi_1000m_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ndvi.1000m.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
+ snowmask_hdf:
+ file_patterns:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.snowmask.hdf'
+ file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler
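+ # The '{platform_indicator:1s}1.' patterns above follow the IMAPP
+ # direct-broadcast naming convention (e.g. 't1.22001.1200.mod35.hdf' for
+ # Terra; an assumed example name).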
datasets:
longitude:
name: longitude
resolution:
5000:
- file_type: [mod35_hdf, mod06_hdf]
+ file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf]
1000:
file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf]
500:
@@ -43,7 +89,7 @@ datasets:
resolution:
5000:
# For EUM reduced (thinned) files
- file_type: [mod35_hdf, mod06_hdf]
+ file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf]
1000:
file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf]
500:
@@ -59,25 +105,28 @@ datasets:
cloud_mask:
# byte Cloud_Mask(Byte_Segment, Cell_Along_Swath_1km, Cell_Across_Swath_1km)
name: cloud_mask
- resolution: [1000, 250]
+ coordinates: [longitude, latitude]
+ resolution:
+ 250:
+ file_type: mod35_hdf
+ # Quality Assurance flag is necessary for 250m resolution dataset
+ quality_assurance: True
+ byte: [4, 5]
+ bit_start: 0
+ bit_count: 1
+ 1000:
+ file_type: [mod35_hdf, mask_byte1_hdf]
+ quality_assurance: False
+ byte: 0
+ bit_start: 1
+ bit_count: 2
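+ # Decoding sketch: (byte_value >> bit_start) & (2**bit_count - 1).
+ # At 1 km this reads bits 1-2 of byte 0, the four-level MOD35 cloud mask
+ # confidence; at 250 m, bit 0 of bytes 4 and 5 carries the per-subpixel
+ # visible cloudiness flags.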
+ # NOTE: byte information and file_key below are unused for the
+ # mask_byte1_hdf file type.
# The dimension of the dataset where the byte information is stored
- byte_dimension: 0
- # Different logic depending on the resolution
- byte:
- - 1000: 0
- - 250: [4, 5]
- bit_start:
- - 1000: 1
- - 250: 0
- bit_count:
- - 1000: 2
- - 250: 1
- # Quality Assurance flag is necessary for 250m resolution dataset
- quality_assurance:
- - 250: True
file_key: Cloud_Mask
- file_type: mod35_hdf
- coordinates: [longitude, latitude]
+ imapp_file_key: MODIS_Cloud_Mask
+ category: True
+ byte_dimension: 0
quality_assurance:
# byte Quality_Assurance(Cell_Along_Swath_1km, Cell_Across_Swath_1km, QA_Dimension)
@@ -89,6 +138,7 @@ datasets:
byte: 0
bit_start: 0
bit_count: 1
+ category: True
file_key: Quality_Assurance
file_type: mod35_hdf
coordinates: [longitude, latitude]
@@ -103,7 +153,7 @@ datasets:
name: brightness_temperature
long_name: Observed Brightness Temperature from Averaged Radiances in a 5x5 1-km Pixel Region
units: K
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -113,19 +163,20 @@ datasets:
name: surface_temperature
long_name: Surface Temperature from Ancillary Data
units: K
- file_type: mod06_hdf
coordinates: [longitude, latitude]
resolution:
1000:
file_key: surface_temperature_1km
+ file_type: mod06_hdf
5000:
file_key: Surface_Temperature
+ file_type: [mod06_hdf, mod06ct_hdf]
surface_pressure:
name: surface_pressure
long_name: Surface Pressure from Ancillary Data
units: hPa
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -137,7 +188,7 @@ datasets:
units: None
comment: "1: CO2-slicing retrieval, bands 36/35, 2: CO2-slicing retrieval, bands 35/34, 3: CO2-slicing retrieval, bands 35/33,
4: CO2-slicing retrieval, bands 34/33, 6: IR-window retrieval, band 31"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -189,13 +240,14 @@ datasets:
name: cloud_top_pressure
long_name: Cloud Top Pressure Level (rounded to nearest 5 mb)
units: hPa
- file_type: mod06_hdf
coordinates: [longitude, latitude]
resolution:
1000:
file_key: cloud_top_pressure_1km
+ file_type: mod06_hdf
5000:
file_key: Cloud_Top_Pressure
+ file_type: [mod06_hdf, mod06ct_hdf]
cloud_top_pressure_nadir:
name: cloud_top_pressure_nadir
@@ -211,7 +263,7 @@ datasets:
name: cloud_top_pressure_night
long_name: Cloud Top Pressure Level, Night Data Only (rounded to nearest 5 mb)
units: hPa
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -231,7 +283,7 @@ datasets:
name: cloud_top_pressure_day
long_name: Cloud Top Pressure Level, Day Only (rounded to nearest 5 mb)
units: hPa
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -251,13 +303,14 @@ datasets:
name: cloud_top_temperature
long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level
units: K
- file_type: mod06_hdf
coordinates: [longitude, latitude]
resolution:
1000:
file_key: cloud_top_temperature_1km
+ file_type: mod06_hdf
5000:
file_key: Cloud_Top_Temperature
+ file_type: [mod06_hdf, mod06ct_hdf]
cloud_top_temperature_nadir:
name: cloud_top_temperature_nadir
@@ -273,7 +326,7 @@ datasets:
name: cloud_top_temperature_night
long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level, Night Only
units: K
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -293,7 +346,7 @@ datasets:
name: cloud_top_temperature_day
long_name: Temperature from Ancillary Data at Retrieved Cloud Top Pressure Level, Day Only
units: K
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -313,7 +366,7 @@ datasets:
name: tropopause_height
long_name: Tropopause Height from Ancillary Data
units: hPa
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -323,7 +376,7 @@ datasets:
name: cloud_fraction
long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -343,7 +396,7 @@ datasets:
name: cloud_fraction_night
long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask, Night Only
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -363,7 +416,7 @@ datasets:
name: cloud_fraction_day
long_name: Cloud Fraction in Retrieval Region (5x5 1-km Pixels) from 1-km Cloud Mask, Day Only
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -383,7 +436,7 @@ datasets:
name: cloud_effective_emissivity
long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -403,7 +456,7 @@ datasets:
name: cloud_effective_emissivity_night
long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval, Night Only
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -423,7 +476,7 @@ datasets:
name: cloud_effective_emissivity_day
long_name: Cloud Effective Emissivity from Cloud Top Pressure Retrieval, Day Only
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -443,7 +496,7 @@ datasets:
name: cloud_top_pressure_infrared
long_name: Cloud Top Pressure from IR Window Retrieval
units: hPa
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -453,7 +506,7 @@ datasets:
name: spectral_cloud_forcing
long_name: Spectral Cloud Forcing (cloud minus clear radiance)
units: Watts/meter2/steradian/micron
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -463,7 +516,7 @@ datasets:
name: cloud_top_pressure_from_ratios
long_name: Cloud Top Pressure Levels from Ratios of Bands 36/35, 35/34, 35/33, 34/33 from the CO2-slicing Algorithm
units: hPa
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -473,7 +526,7 @@ datasets:
name: radiance_variance
long_name: Band 31 Radiance Standard Deviation
units: Watts/meter2/steradian/micron
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
5000:
@@ -484,20 +537,23 @@ datasets:
long_name: Cloud Phase from 8.5 and 11 um Bands
units: None
comment: "0: cloud free, 1: water cloud, 2: ice cloud, 3: mixed phase cloud, 6: undetermined phase"
- file_type: mod06_hdf
coordinates: [longitude, latitude]
+ category: True
resolution:
1000:
file_key: Cloud_Phase_Infrared_1km
+ file_type: mod06_hdf
5000:
file_key: Cloud_Phase_Infrared
+ file_type: [mod06_hdf, mod06ct_hdf]
cloud_phase_infrared_night:
name: cloud_phase_infrared_night
long_name: Cloud Phase from 8.5 and 11 um Bands, Night Only
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
+ category: True
resolution:
5000:
file_key: Cloud_Phase_Infrared_Night
@@ -506,8 +562,9 @@ datasets:
name: cloud_phase_infrared_day
long_name: Cloud Phase from 8.5 and 11 um Bands, Day Only
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
+ category: True
resolution:
5000:
file_key: Cloud_Phase_Infrared_Day
@@ -519,6 +576,7 @@ datasets:
comment: "0: stratospheric cloud test not performed, 1: stratospheric cloud not indicated, 2: stratospheric cloud indicated (BTD35-33 > 0.5K)"
file_type: mod06_hdf
coordinates: [longitude, latitude]
+ category: True
resolution:
1000:
file_key: os_top_flag_1km
@@ -541,6 +599,7 @@ datasets:
4: CO2-slicing retrieval, bands 34/33, 6: IR-window retrieval, band 31"
file_type: mod06_hdf
coordinates: [longitude, latitude]
+ category: True
resolution:
1000:
file_key: cloud_top_method_1km
@@ -589,7 +648,7 @@ datasets:
name: cloud_effective_radius
long_name: "Cloud Particle Effective Radius two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral"
units: micron
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -649,7 +708,7 @@ datasets:
name: cloud_optical_thickness
long_name: "Cloud Optical Thickness two-channel retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral"
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -669,7 +728,7 @@ datasets:
name: cloud_effective_radius_1621
long_name: "Cloud Particle Effective Radius two-channel retrieval using band 7 and band 6from best points: not failed in any way, not marked for clear sky restoral"
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -689,7 +748,7 @@ datasets:
name: cloud_optical_thickness_1621
long_name: "Cloud Optical Thickness two-channel retrieval using band 7 and band 6 from best points: not failed in any way, not marked for clear sky restoral"
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -709,7 +768,7 @@ datasets:
name: cloud_water_path
long_name: "Column Water Path two-band retrieval using band 7 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral"
units: g/m^2
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -729,7 +788,7 @@ datasets:
name: cloud_water_path_1621
long_name: "Column Water Path two-band retrieval using band 7 and band 6from best points: not failed in any way, not marked for clear sky restoral"
units: g/m^2
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -744,7 +803,7 @@ datasets:
resolution:
1000:
file_key: Cloud_Water_Path_1621_PCL
-
+
cloud_water_path_16:
name: cloud_water_path_16
long_name: "Column Water Path two-band retrieval using band 6 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral"
@@ -764,7 +823,7 @@ datasets:
resolution:
1000:
file_key: Cloud_Water_Path_16_PCL
-
+
cloud_water_path_37:
name: cloud_water_path_37
long_name: "Column Water Path two-band retrieval using band 20 and either band 1, 2, or 5 (specified in Quality_Assurance_1km)from best points: not failed in any way, not marked for clear sky restoral"
@@ -784,7 +843,7 @@ datasets:
resolution:
1000:
file_key: Cloud_Water_Path_37_PCL
-
+
cloud_effective_radius_uncertainty:
name: cloud_effective_radius_uncertainty
long_name: Cloud Effective Particle Radius (from band 7) Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m
@@ -793,7 +852,7 @@ datasets:
coordinates: [longitude, latitude]
resolution:
1000:
- file_key: Cloud_Effective_Radius_Unvertainty
+ file_key: Cloud_Effective_Radius_Uncertainty
cloud_effective_radius_uncertainty_16:
name: cloud_effective_radius_uncertainty_16
@@ -819,7 +878,7 @@ datasets:
name: cloud_optical_thickness_uncertainty
long_name: Cloud Optical Thickness Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m
units: "%"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -829,7 +888,7 @@ datasets:
name: cloud_water_path_uncertainty
long_name: Cloud Water Path Relative Uncertainty (Percent)from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m
units: "%"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -839,7 +898,7 @@ datasets:
name: cloud_effective_radius_uncertainty_1621
long_name: Cloud Effective Particle Radius Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m
units: "%"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -849,7 +908,7 @@ datasets:
name: cloud_optical_thickness_uncertainty_1621
long_name: Cloud Optical Thickness Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m
units: "%"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -859,7 +918,7 @@ datasets:
name: cloud_water_path_uncertainty_1621
long_name: Cloud Water Path Relative Uncertainty (Percent) using band 7 and band 6from both best points and points identified as cloud edge at 1km resolution or partly cloudy at 250m
units: "%"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -910,8 +969,9 @@ datasets:
long_name: Cloud Phase Determination Used in Optical Thickness/Effective Radius Retrieval
units: None
comment: "0: cloud mask undetermined, 1: clear sky, 2: liquid water cloud, 3: ice cloud, 4: undetermined phase cloud (but attempted as liquid water)"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
+ category: True
resolution:
1000:
file_key: Cloud_Phase_Optical_Properties
@@ -920,8 +980,9 @@ datasets:
name: cloud_multi_layer_flag
long_name: Cloud Multi Layer Identification From MODIS Shortwave Observations
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
+ category: True
resolution:
1000:
file_key: Cloud_Multi_Layer_Flag
@@ -930,7 +991,7 @@ datasets:
name: cirrus_reflectance
long_name: Cirrus Reflectance
units: None
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
@@ -941,8 +1002,105 @@ datasets:
long_name: Cirrus Reflectance Flag
units: None
comment: "0: bad data, 1: non-cirrus pixel, 2: cirrus pixel, 3: contrail pixel"
- file_type: mod06_hdf
+ file_type: [mod06_hdf, mod06ct_hdf]
coordinates: [longitude, latitude]
resolution:
1000:
file_key: Cirrus_Reflectance_Flag
+
+ # Ice Concentration
+ ice_con:
+ name: ice_concentration
+ file_type: icecon_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: Ice_Concentration
+ ice_mask:
+ # TODO: Do fancy integer handling
+ name: ice_mask
+ file_type: icecon_hdf
+ coordinates: [longitude, latitude]
+ category: True
+ resolution: 1000
+ file_key: Ice_Mask
+
+ # Inversion
+ inversion_depth:
+ name: inversion_depth
+ file_type: inversion_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: Inversion_Depth
+ inversion_strength:
+ name: inversion_strength
+ file_type: inversion_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: Inversion_Strength
+
+ # IST
+ ice_surface_temperature:
+ name: ice_surface_temperature
+ file_type: ist_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: Ice_Surface_Temperature
+
+ # MOD07
+ # Total Precipitable Water
+ water_vapor:
+ name: water_vapor
+ file_type: mod07_hdf
+ coordinates: [longitude, latitude]
+ resolution: 5000
+ file_key: Water_Vapor
+
+ # MOD28
+ sea_surface_temperature:
+ name: sea_surface_temperature
+ file_type: mod28_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: Sea_Surface_Temperature
+
+ # MODLST
+ land_surface_temperature:
+ name: lst
+ file_type: modlst_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: LST
+
+ # NDVI
+ ndvi:
+ name: ndvi
+ file_type: ndvi_1000m_hdf
+ coordinates: [longitude, latitude]
+ resolution: 1000
+ file_key: NDVI
+
+ # Snow Mask
+ snow_mask:
+ name: snow_mask
+ file_type: snowmask_hdf
+ coordinates: [longitude, latitude]
+ category: True
+ resolution: 1000
+ file_key: Snow_Mask
+
+ # mask_byte1
+ # See the MOD35 cloud_mask entry which also handles mask_byte1 cloud_mask
+ land_sea_mask_mask_byte1:
+ name: land_sea_mask
+ resolution: 1000
+ file_key: MODIS_Simple_LandSea_Mask
+ file_type: mask_byte1_hdf
+ category: True
+ coordinates: [longitude, latitude]
+ snow_ice_mask_mask_byte1:
+ name: snow_ice_mask
+ resolution: 1000
+ file_key: MODIS_Snow_Ice_Flag
+ file_type: mask_byte1_hdf
+ category: True
+ coordinates: [longitude, latitude]
diff --git a/satpy/etc/readers/msi_safe.yaml b/satpy/etc/readers/msi_safe.yaml
index f6c42c6a4e..d93d269782 100644
--- a/satpy/etc/readers/msi_safe.yaml
+++ b/satpy/etc/readers/msi_safe.yaml
@@ -1,18 +1,26 @@
reader:
- description: SAFE Reader for MSI data (Sentinel-2)
name: msi_safe
+ short_name: MSI SAFE
+ long_name: Sentinel-2 A and B MSI data in SAFE format
+ description: SAFE Reader for MSI data (Sentinel-2)
+ status: Nominal
+ supports_fsspec: false
sensors: [msi]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
file_types:
- safe_granule:
+ safe_granule:
file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIL1C
file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/IMG_DATA/T{tile_number:5s}_{file_discriminator:%Y%m%dT%H%M%S}_{band_name:3s}.jp2']
- requires: [safe_g_metadata]
- safe_g_metadata:
- file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML
+ requires: [safe_metadata, safe_tile_metadata]
+ safe_tile_metadata:
+ file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSITileMDXML
file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/GRANULE/L1C_T{gtile_number:5s}_A{absolute_orbit_number:6d}_{gfile_discriminator:%Y%m%dT%H%M%S}/MTD_TL.xml']
+ safe_metadata:
+ file_reader: !!python/name:satpy.readers.msi_safe.SAFEMSIMDXML
+ file_patterns: ['{fmission_id:3s}_MSIL1C_{observation_time:%Y%m%dT%H%M%S}_N{fprocessing_baseline_number:4d}_R{relative_orbit_number:3d}_T{dtile_number:5s}_{dproduct_discriminator:%Y%m%dT%H%M%S}.SAFE/MTD_MSIL1C.xml']
+
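+# Note that the granule now requires both metadata files: the tile-level
+# MTD_TL.xml provides the angle grids used by the datasets below, while the
+# product-level MTD_MSIL1C.xml presumably supplies the quantification values
+# behind the new reflectance/radiance calibrations.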
datasets:
@@ -21,6 +29,13 @@ datasets:
sensor: MSI
wavelength: [0.415, 0.443, 0.470]
resolution: 60
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B02:
@@ -28,6 +43,13 @@ datasets:
sensor: MSI
wavelength: [0.440, 0.490, 0.540]
resolution: 10
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B03:
@@ -35,6 +57,13 @@ datasets:
sensor: MSI
wavelength: [0.540, 0.560, 0.580]
resolution: 10
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B04:
@@ -42,6 +71,13 @@ datasets:
sensor: MSI
wavelength: [0.645, 0.665, 0.685]
resolution: 10
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B05:
@@ -49,6 +85,13 @@ datasets:
sensor: MSI
wavelength: [0.695, 0.705, 0.715]
resolution: 20
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B06:
@@ -56,6 +99,13 @@ datasets:
sensor: MSI
wavelength: [0.731, 0.740, 0.749]
resolution: 20
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B07:
@@ -63,6 +113,13 @@ datasets:
sensor: MSI
wavelength: [0.764, 0.783, 0.802]
resolution: 20
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B08:
@@ -70,6 +127,13 @@ datasets:
sensor: MSI
wavelength: [0.780, 0.842, 0.905]
resolution: 10
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B8A:
@@ -77,6 +141,13 @@ datasets:
sensor: MSI
wavelength: [0.855, 0.865, 0.875]
resolution: 20
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B09:
@@ -84,6 +155,13 @@ datasets:
sensor: MSI
wavelength: [0.935, 0.945, 0.955]
resolution: 60
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B10:
@@ -91,6 +169,13 @@ datasets:
sensor: MSI
wavelength: [1.365, 1.375, 1.385]
resolution: 60
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B11:
@@ -98,6 +183,13 @@ datasets:
sensor: MSI
wavelength: [1.565, 1.610, 1.655]
resolution: 20
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
B12:
@@ -105,31 +197,38 @@ datasets:
sensor: MSI
wavelength: [2.100, 2.190, 2.280]
resolution: 20
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: W m-2 um-1 sr-1
file_type: safe_granule
solar_zenith_angle:
name: solar_zenith_angle
resolution: [10, 20, 60]
- file_type: safe_g_metadata
+ file_type: safe_tile_metadata
xml_tag: Sun_Angles_Grid/Zenith
solar_azimuth_angle:
name: solar_azimuth_angle
resolution: [10, 20, 60]
- file_type: safe_g_metadata
+ file_type: safe_tile_metadata
xml_tag: Sun_Angles_Grid/Azimuth
satellite_azimuth_angle:
name: satellite_azimuth_angle
resolution: [10, 20, 60]
- file_type: safe_g_metadata
+ file_type: safe_tile_metadata
xml_tag: Viewing_Incidence_Angles_Grids
xml_item: Azimuth
satellite_zenith_angle:
name: satellite_zenith_angle
resolution: [10, 20, 60]
- file_type: safe_g_metadata
+ file_type: safe_tile_metadata
xml_tag: Viewing_Incidence_Angles_Grids
xml_item: Zenith
diff --git a/satpy/etc/readers/msu_gsa_l1b.yaml b/satpy/etc/readers/msu_gsa_l1b.yaml
new file mode 100644
index 0000000000..d74046e1ba
--- /dev/null
+++ b/satpy/etc/readers/msu_gsa_l1b.yaml
@@ -0,0 +1,223 @@
+reader:
+ name: msu_gsa_l1b
+ short_name: MSU-GS/A
+ long_name: Arctica-M (N1) MSU-GS/A data in HDF5 format
+ description: HDF5 reader for MSU-GS/A data
+ status: Beta
+ supports_fsspec: false
+ sensors: [msu_gsa]
+ default_channels: []
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+
+file_types:
+ msu_gsa_l1b:
+ file_reader: !!python/name:satpy.readers.msu_gsa_l1b.MSUGSAFileHandler
+ file_patterns: ['ArcticaM{mission_id:1s}_{start_time:%Y%m%d%H%M}.h5']
+
+datasets:
+ longitude:
+ name: longitude
+ units: degrees_east
+ standard_name: longitude
+ resolution:
+ 4000:
+ file_type: msu_gsa_l1b
+ file_key: Geolocation/resolution_4km/Longitude
+ 1000:
+ file_type: msu_gsa_l1b
+ file_key: Geolocation/resolution_1km/Longitude
+ latitude:
+ name: latitude
+ units: degrees_north
+ standard_name: latitude
+ resolution:
+ 4000:
+ file_type: msu_gsa_l1b
+ file_key: Geolocation/resolution_4km/Latitude
+ 1000:
+ file_type: msu_gsa_l1b
+ file_key: Geolocation/resolution_1km/Latitude
+
+ # The channels C01-C03 (VIS) are available at 1km resolution
+ C01:
+ name: C01
+ sensor: msu_gsa
+ wavelength: [0.5, 0.6, 0.65]
+ resolution: 1000
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance
+ units: W m-2 sr-1
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Radiance_01
+ C02:
+ name: C02
+ sensor: msu_gsa
+ wavelength: [0.65, 0.7, 0.8]
+ resolution: 1000
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance
+ units: W m-2 sr-1
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Radiance_02
+ C03:
+ name: C03
+ sensor: msu_gsa
+ wavelength: [0.8, 0.9, 0.9]
+ resolution: 1000
+ calibration:
+ reflectance:
+ standard_name: toa_bidirectional_reflectance
+ units: "%"
+ radiance:
+ standard_name: toa_outgoing_radiance
+ units: W m-2 sr-1
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Radiance_03
+
+ # The channels C04-C10 (IR) are available at 4km resolution
+ C04:
+ name: C04
+ sensor: msu_gsa
+ wavelength: [3.5, 3.8, 4.0]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_04
+ C05:
+ name: C05
+ sensor: msu_gsa
+ wavelength: [5.7, 6.4, 7.0]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_05
+ C06:
+ name: C06
+ sensor: msu_gsa
+ wavelength: [7.5, 8.0, 8.5]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_06
+ C07:
+ name: C07
+ sensor: msu_gsa
+ wavelength: [8.2, 8.7, 9.2]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_07
+ C08:
+ name: C08
+ sensor: msu_gsa
+ wavelength: [9.2, 9.7, 10.2]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_08
+ C09:
+ name: C09
+ sensor: msu_gsa
+ wavelength: [10.2, 10.8, 11.2]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_09
+ C10:
+ name: C10
+ sensor: msu_gsa
+ wavelength: [11.2, 11.9, 12.5]
+ resolution: 4000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+ coordinates: [longitude, latitude]
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Brightness_Temperature_10
+
+ # The solar and viewing geometry is available at both resolutions
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ units: degrees
+ standard_name: solar_zenith_angle
+ resolution:
+ 4000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Solar_Zenith_Angle
+ 1000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Solar_Zenith_Angle
+ coordinates: [longitude, latitude]
+ solar_azimuth_angle:
+ name: solar_azimuth_angle
+ units: degrees
+ standard_name: solar_azimuth_angle
+ resolution:
+ 4000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Solar_Azimuth_Angle
+ 1000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Solar_Azimuth_Angle
+ coordinates: [longitude, latitude]
+ satellite_zenith_angle:
+ name: satellite_zenith_angle
+ units: degrees
+ standard_name: satellite_zenith_angle
+ resolution:
+ 4000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Satellite_Zenith_Angle
+ 1000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Satellite_Zenith_Angle
+ coordinates: [longitude, latitude]
+ satellite_azimuth_angle:
+ name: satellite_azimuth_angle
+ units: degrees
+ standard_name: satellite_azimuth_angle
+ resolution:
+ 4000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_4km/Satellite_Azimuth_Angle
+ 1000:
+ file_type: msu_gsa_l1b
+ file_key: Data/resolution_1km/Satellite_Azimuth_Angle
+ coordinates: [longitude, latitude]
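+
+# A minimal usage sketch in Python (the reader name "msu_gsa_l1b" and the
+# placeholder file name are assumptions; check satpy.available_readers()):
+#   from satpy import Scene
+#   scn = Scene(filenames=["<MSU-GS/A L1B HDF5 file>"], reader="msu_gsa_l1b")
+#   scn.load(["C01", "C04"])  # 1 km VIS reflectance and 4 km IR BT
+#   # geolocation and angles resolve per resolution via the blocks above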
diff --git a/satpy/etc/readers/mtsat2-imager_hrit.yaml b/satpy/etc/readers/mtsat2-imager_hrit.yaml
index be7b613a82..126026b7a1 100644
--- a/satpy/etc/readers/mtsat2-imager_hrit.yaml
+++ b/satpy/etc/readers/mtsat2-imager_hrit.yaml
@@ -1,7 +1,7 @@
reader:
name: mtsat2-imager_hrit
short_name: MTSAT-2 Imager HRIT
- long_name: MTSAT-2 Imager Level 1 (HRIT)
+ long_name: MTSAT-2 Imager Level 1 data in JMA HRIT format
description: >
Reader for MTSAT-2 Imager data in JMA HRIT format. Note that there
exist two versions of the dataset. A segmented (data split into
@@ -11,6 +11,8 @@ reader:
- https://www.wmo-sat.info/oscar/instruments/view/219
- http://www.data.jma.go.jp/mscweb/notice/Himawari7_e.html
+ status: Beta
+ supports_fsspec: false
sensors: [mtsat2_imager]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
@@ -21,30 +23,35 @@ file_types:
file_patterns:
- 'IMG_DK{area:02d}VIS_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS'
+ - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}VIS.gz'
hrit_ir1:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR1_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1'
+ - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR1.gz'
hrit_ir2:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR2_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2'
+ - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR2.gz'
hrit_ir3:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR3_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3'
+ - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR3.gz'
hrit_ir4:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
file_patterns:
- 'IMG_DK{area:02d}IR4_{start_time:%Y%m%d%H%M}'
- 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4'
+ - 'HRIT_MTSAT2_{start_time:%Y%m%d_%H%M}_DK{area:02d}IR4.gz'
hrit_vis_seg:
file_reader: !!python/name:satpy.readers.hrit_jma.HRITJMAFileHandler
diff --git a/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml b/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml
index bf17a427ee..ec3c5cab77 100644
--- a/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml
+++ b/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml
@@ -5,12 +5,13 @@
reader:
name: mviri_l1b_fiduceo_nc
short_name: FIDUCEO MVIRI FCDR
- long_name: >
- Fundamental Climate Data Record of re-calibrated Level 1.5 Infrared, Water Vapour, and Visible radiances
- from the Meteosat Visible Infra-Red Imager (MVIRI) instrument onboard the Meteosat First Generation satellites
+ long_name: MFG (Meteosat 2 to 7) MVIRI data in netCDF format (FIDUCEO FCDR)
description: >
- Reader for FIDUCEO MVIRI FCDR data in netCDF format. For documentation see
- http://doi.org/10.15770/EUM_SEC_CLM_0009 .
+ Reader for re-calibrated Level 1.5 Infrared, Water Vapour, and Visible radiances from
+ Meteosat Visible Infra-Red Imager (MVIRI) Fundamental Climate Data Record (FCDR) data.
+ For documentation see: http://doi.org/10.15770/EUM_SEC_CLM_0009 .
+ status: Beta
+ supports_fsspec: false
sensors: [mviri]
default_channels: [VIS, WV, IR]
reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
diff --git a/satpy/etc/readers/mwi_l1b_nc.yaml b/satpy/etc/readers/mwi_l1b_nc.yaml
new file mode 100644
index 0000000000..3c579e128b
--- /dev/null
+++ b/satpy/etc/readers/mwi_l1b_nc.yaml
@@ -0,0 +1,978 @@
+reader:
+ name: mwi_l1b_nc
+ short_name: MWI L1B RAD NetCDF4
+ long_name: EPS-SG MWI L1B Radiance (NetCDF4)
+ description: >
+ Reader for EUMETSAT EPS-SG Micro-Wave Imager Level 1B Radiance files in NetCDF4.
+ status: Beta
+ sensors: [mwi]
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+ data_identification_keys:
+ name:
+ required: true
+ frequency_double_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
+ frequency_range:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange
+ polarization:
+ enum:
+ - H
+ - V
+ calibration:
+ enum:
+ - brightness_temperature
+ - radiance
+ transitive: true
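+  # Sketch: with the identification keys above, a channel can be requested by
+  # name plus calibration (assumes a Scene `scn` built with this reader; the
+  # query semantics follow satpy's DataQuery handling of these keys):
+  #   from satpy.dataset import DataQuery
+  #   scn.load([DataQuery(name='1', calibration='radiance')])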
+
+file_types:
+  # EUMETSAT EPS-SG Micro-Wave Imager Level 1B Radiance files in NetCDF4 format
+ nc_mwi_l1b_rad:
+ file_reader: !!python/name:satpy.readers.ici_l1b_nc.IciL1bNCFileHandler
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-MWI-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ longitude: data/navigation_data/longitude
+ latitude: data/navigation_data/latitude
+ observation_zenith: data/navigation_data/mwi_oza
+ observation_azimuth: data/navigation_data/mwi_azimuth
+ solar_zenith: data/navigation_data/mwi_solar_zenith_angle
+ solar_azimuth: data/navigation_data/mwi_solar_azimuth_angle
+ orthorect: True
+
+datasets:
+
+# --- Coordinates ---
+ lon_pixels_group_1:
+ name: lon_pixels_group_1
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 0
+
+ lat_pixels_group_1:
+ name: lat_pixels_group_1
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 0
+
+ lon_pixels_group_2:
+ name: lon_pixels_group_2
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 1
+
+ lat_pixels_group_2:
+ name: lat_pixels_group_2
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 1
+
+ lon_pixels_group_3:
+ name: lon_pixels_group_3
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 2
+
+ lat_pixels_group_3:
+ name: lat_pixels_group_3
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 2
+
+ lon_pixels_group_4:
+ name: lon_pixels_group_4
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 3
+
+ lat_pixels_group_4:
+ name: lat_pixels_group_4
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 3
+
+ lon_pixels_group_5:
+ name: lon_pixels_group_5
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 4
+
+ lat_pixels_group_5:
+ name: lat_pixels_group_5
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 4
+
+ lon_pixels_group_6:
+ name: lon_pixels_group_6
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 5
+
+ lat_pixels_group_6:
+ name: lat_pixels_group_6
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 5
+
+ lon_pixels_group_7:
+ name: lon_pixels_group_7
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 6
+
+ lat_pixels_group_7:
+ name: lat_pixels_group_7
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 6
+
+ lon_pixels_group_8:
+ name: lon_pixels_group_8
+ file_type: nc_mwi_l1b_rad
+ file_key: longitude
+ orthorect_data: data/navigation_data/delta_longitude
+ standard_name: longitude
+ n_data_groups: 7
+
+ lat_pixels_group_8:
+ name: lat_pixels_group_8
+ file_type: nc_mwi_l1b_rad
+ file_key: latitude
+ orthorect_data: data/navigation_data/delta_latitude
+ standard_name: latitude
+ n_data_groups: 7
+
+ longitude_ssp:
+ name: longitude_ssp
+ file_type: nc_mwi_l1b_rad
+ file_key: data/navigation_data/longitude_ssp
+ standard_name: longitude
+
+ latitude_ssp:
+ name: latitude_ssp
+ file_type: nc_mwi_l1b_rad
+ file_key: data/navigation_data/latitude_ssp
+ standard_name: latitude
+
+# --- Measurement data ---
+ '1':
+ name: '1'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_18_vh
+ coordinates: [lon_pixels_group_1, lat_pixels_group_1]
+ n_18: 0
+ chan_index: 0
+ frequency_range:
+ central: 18.7
+ bandwidth: 0.2
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '2':
+ name: '2'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_18_vh
+ coordinates: [lon_pixels_group_1, lat_pixels_group_1]
+ n_18: 1
+ chan_index: 0
+ frequency_range:
+ central: 18.7
+ bandwidth: 0.2
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '3':
+ name: '3'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_23_vh
+ coordinates: [lon_pixels_group_2, lat_pixels_group_2]
+ n_23: 0
+ chan_index: 1
+ frequency_range:
+ central: 23.8
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '4':
+ name: '4'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_23_vh
+ coordinates: [lon_pixels_group_2, lat_pixels_group_2]
+ n_23: 1
+ chan_index: 1
+ frequency_range:
+ central: 23.8
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '5':
+ name: '5'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_31_vh
+ coordinates: [lon_pixels_group_3, lat_pixels_group_3]
+ n_31: 0
+ chan_index: 2
+ frequency_range:
+ central: 31.4
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '6':
+ name: '6'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_31_vh
+ coordinates: [lon_pixels_group_3, lat_pixels_group_3]
+ n_31: 1
+ chan_index: 2
+ frequency_range:
+ central: 31.4
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '7':
+ name: '7'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_v
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 0
+ chan_index: 3
+ frequency_range:
+ central: 50.3
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '8':
+ name: '8'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_h
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 0
+ chan_index: 3
+ frequency_range:
+ central: 50.3
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '9':
+ name: '9'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_v
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 1
+ chan_index: 4
+ frequency_range:
+ central: 52.61
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '10':
+ name: '10'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_h
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 1
+ chan_index: 4
+ frequency_range:
+ central: 52.61
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '11':
+ name: '11'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_v
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 2
+ chan_index: 5
+ frequency_range:
+ central: 53.24
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '12':
+ name: '12'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_h
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 2
+ chan_index: 5
+ frequency_range:
+ central: 53.24
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '13':
+ name: '13'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_v
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 3
+ chan_index: 6
+ frequency_range:
+ central: 53.75
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '14':
+ name: '14'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_50_53_h
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+ n_50: 3
+ chan_index: 6
+ frequency_range:
+ central: 53.75
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '15':
+ name: '15'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_89_vh
+ coordinates: [lon_pixels_group_5, lat_pixels_group_5]
+ n_89: 0
+ chan_index: 7
+ frequency_range:
+ central: 89.0
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '16':
+ name: '16'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_89_vh
+ coordinates: [lon_pixels_group_5, lat_pixels_group_5]
+ n_89: 1
+ chan_index: 7
+ frequency_range:
+ central: 89.0
+ bandwidth: 0.4
+ unit: GHz
+ polarization: H
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '17':
+ name: '17'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_118_v
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+ n_118: 0
+ chan_index: 8
+ frequency_double_sideband:
+ central: 118.7503
+ side: 3.2
+ bandwidth: 0.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '18':
+ name: '18'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_118_v
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+ n_118: 1
+ chan_index: 9
+ frequency_double_sideband:
+ central: 118.7503
+ side: 2.1
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '19':
+ name: '19'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_118_v
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+ n_118: 2
+ chan_index: 10
+ frequency_double_sideband:
+ central: 118.7503
+ side: 1.4
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '20':
+ name: '20'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_118_v
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+ n_118: 3
+ chan_index: 11
+ frequency_double_sideband:
+ central: 118.7503
+ side: 1.2
+ bandwidth: 0.4
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '21':
+ name: '21'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_165_v
+ coordinates: [lon_pixels_group_7, lat_pixels_group_7]
+ n_165: 0
+ chan_index: 12
+ frequency_double_sideband:
+ central: 165.5
+ side: 0.75
+ bandwidth: 1.35
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '22':
+ name: '22'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_183_v
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+ n_183: 0
+ chan_index: 13
+ frequency_double_sideband:
+ central: 183.31
+ side: 7.0
+ bandwidth: 2.0
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '23':
+ name: '23'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_183_v
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+ n_183: 1
+ chan_index: 14
+ frequency_double_sideband:
+ central: 183.31
+ side: 6.1
+ bandwidth: 1.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '24':
+ name: '24'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_183_v
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+ n_183: 2
+ chan_index: 15
+ frequency_double_sideband:
+ central: 183.31
+ side: 4.9
+ bandwidth: 1.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '25':
+ name: '25'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_183_v
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+ n_183: 3
+ chan_index: 16
+ frequency_double_sideband:
+ central: 183.31
+ side: 3.4
+ bandwidth: 1.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ '26':
+ name: '26'
+ file_type: nc_mwi_l1b_rad
+ file_key: data/measurement_data/mwi_radiance_183_v
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+ n_183: 4
+ chan_index: 17
+ frequency_double_sideband:
+ central: 183.31
+ side: 2.0
+ bandwidth: 1.5
+ unit: GHz
+ polarization: V
+ calibration:
+ radiance:
+ standard_name: toa_outgoing_radiance_per_unit_wavelength
+ units: "mWm^-2sr^-1(cm^-1)^-1"
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ units: K
+
+ # --- Navigation data ---
+ time_start_scan_utc:
+ name: time_start_scan_utc
+ standard_name: time_start_scan_utc
+ file_type: nc_mwi_l1b_rad
+ file_key: data/navigation_data/time_start_scan_utc
+ coordinates: [longitude_ssp, latitude_ssp]
+
+ # --- Geometric data ---
+ solar_zenith_group_1:
+ name: solar_zenith_group_1
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 0
+ coordinates: [lon_pixels_group_1, lat_pixels_group_1]
+
+ solar_zenith_group_2:
+ name: solar_zenith_group_2
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 1
+ coordinates: [lon_pixels_group_2, lat_pixels_group_2]
+
+ solar_zenith_group_3:
+ name: solar_zenith_group_3
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 2
+ coordinates: [lon_pixels_group_3, lat_pixels_group_3]
+
+ solar_zenith_group_4:
+ name: solar_zenith_group_4
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 3
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+
+ solar_zenith_group_5:
+ name: solar_zenith_group_5
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 4
+ coordinates: [lon_pixels_group_5, lat_pixels_group_5]
+
+ solar_zenith_group_6:
+    name: solar_zenith_group_6
+    standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 5
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+
+ solar_zenith_group_7:
+ name: solar_zenith_group_7
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 6
+ coordinates: [lon_pixels_group_7, lat_pixels_group_7]
+
+ solar_zenith_group_8:
+ name: solar_zenith_group_8
+ standard_name: solar_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_zenith
+ n_data_groups: 7
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+
+ solar_azimuth_group_1:
+ name: solar_azimuth_group_1
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 0
+ coordinates: [lon_pixels_group_1, lat_pixels_group_1]
+
+ solar_azimuth_group_2:
+ name: solar_azimuth_group_2
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 1
+ coordinates: [lon_pixels_group_2, lat_pixels_group_2]
+
+ solar_azimuth_group_3:
+ name: solar_azimuth_group_3
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 2
+ coordinates: [lon_pixels_group_3, lat_pixels_group_3]
+
+ solar_azimuth_group_4:
+ name: solar_azimuth_group_4
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 3
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+
+ solar_azimuth_group_5:
+ name: solar_azimuth_group_5
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 4
+ coordinates: [lon_pixels_group_5, lat_pixels_group_5]
+
+ solar_azimuth_group_6:
+ name: solar_azimuth_group_6
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 5
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+
+ solar_azimuth_group_7:
+ name: solar_azimuth_group_7
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 6
+ coordinates: [lon_pixels_group_7, lat_pixels_group_7]
+
+ solar_azimuth_group_8:
+ name: solar_azimuth_group_8
+ standard_name: solar_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: solar_azimuth
+ n_data_groups: 7
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+
+ observation_zenith_group_1:
+ name: observation_zenith_group_1
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 0
+ coordinates: [lon_pixels_group_1, lat_pixels_group_1]
+
+ observation_zenith_group_2:
+ name: observation_zenith_group_2
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 1
+ coordinates: [lon_pixels_group_2, lat_pixels_group_2]
+
+ observation_zenith_group_3:
+ name: observation_zenith_group_3
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 2
+ coordinates: [lon_pixels_group_3, lat_pixels_group_3]
+
+ observation_zenith_group_4:
+ name: observation_zenith_group_4
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 3
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+
+ observation_zenith_group_5:
+ name: observation_zenith_group_5
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 4
+ coordinates: [lon_pixels_group_5, lat_pixels_group_5]
+
+ observation_zenith_group_6:
+ name: observation_zenith_group_6
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 5
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+
+ observation_zenith_group_7:
+ name: observation_zenith_group_7
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 6
+ coordinates: [lon_pixels_group_7, lat_pixels_group_7]
+
+ observation_zenith_group_8:
+    name: observation_zenith_group_8
+ standard_name: sensor_zenith_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_zenith
+ n_data_groups: 7
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
+
+ observation_azimuth_group_1:
+ name: observation_azimuth_group_1
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 0
+ coordinates: [lon_pixels_group_1, lat_pixels_group_1]
+
+ observation_azimuth_group_2:
+ name: observation_azimuth_group_2
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 1
+ coordinates: [lon_pixels_group_2, lat_pixels_group_2]
+
+ observation_azimuth_group_3:
+ name: observation_azimuth_group_3
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 2
+ coordinates: [lon_pixels_group_3, lat_pixels_group_3]
+
+ observation_azimuth_group_4:
+ name: observation_azimuth_group_4
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 3
+ coordinates: [lon_pixels_group_4, lat_pixels_group_4]
+
+ observation_azimuth_group_5:
+ name: observation_azimuth_group_5
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 4
+ coordinates: [lon_pixels_group_5, lat_pixels_group_5]
+
+ observation_azimuth_group_6:
+ name: observation_azimuth_group_6
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 5
+ coordinates: [lon_pixels_group_6, lat_pixels_group_6]
+
+ observation_azimuth_group_7:
+ name: observation_azimuth_group_7
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 6
+ coordinates: [lon_pixels_group_7, lat_pixels_group_7]
+
+ observation_azimuth_group_8:
+ name: observation_azimuth_group_8
+ standard_name: sensor_azimuth_angle
+ file_type: nc_mwi_l1b_rad
+ file_key: observation_azimuth
+ n_data_groups: 7
+ coordinates: [lon_pixels_group_8, lat_pixels_group_8]
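+
+# A minimal usage sketch in Python (the file name is a placeholder matching
+# the pattern above; dataset names are the quoted channel keys in this file):
+#   from satpy import Scene
+#   scn = Scene(filenames=["W_XX-EUMETSAT-Darmstadt,SAT,SGA1-MWI-1B-RAD_...nc"],
+#               reader="mwi_l1b_nc")
+#   scn.load(["1"], calibration="brightness_temperature")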
diff --git a/satpy/etc/readers/mws_l1b_nc.yaml b/satpy/etc/readers/mws_l1b_nc.yaml
new file mode 100644
index 0000000000..1f77bbe7a2
--- /dev/null
+++ b/satpy/etc/readers/mws_l1b_nc.yaml
@@ -0,0 +1,502 @@
+reader:
+ name: mws_l1b_nc
+ short_name: MWS L1B RAD NetCDF4
+ long_name: EPS-SG MWS L1B Radiance (NetCDF4)
+  description: Reader for the EPS-SG MWS (Microwave Sounder) Level 1B files in netCDF4.
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+  sensors: [mws]
+ status: Beta
+ default_channels: []
+
+ data_identification_keys:
+ name:
+ required: true
+ frequency_quadruple_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyQuadrupleSideBand
+ frequency_double_sideband:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand
+ frequency_range:
+ type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange
+ resolution:
+ polarization:
+ enum:
+ - QH
+ - QV
+ calibration:
+ enum:
+ - brightness_temperature
+ transitive: true
+ modifiers:
+ required: true
+ default: []
+ type: !!python/name:satpy.dataset.ModifierTuple
+
+datasets:
+ '1':
+ name: '1'
+ frequency_range:
+ central: 23.8
+ bandwidth: 0.270
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '2':
+ name: '2'
+ frequency_range:
+ central: 31.4
+ bandwidth: 0.180
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '3':
+ name: '3'
+ frequency_range:
+ central: 50.3
+ bandwidth: 0.180
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '4':
+ name: '4'
+ frequency_range:
+ central: 52.8
+ bandwidth: 0.400
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '5':
+ name: '5'
+ frequency_double_sideband:
+ central: 53.246
+ side: 0.08
+ bandwidth: 0.140
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '6':
+ name: '6'
+ frequency_double_sideband:
+ central: 53.596
+ side: 0.115
+ bandwidth: 0.170
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '7':
+ name: '7'
+ frequency_double_sideband:
+ central: 53.948
+ side: 0.081
+ bandwidth: 0.142
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '8':
+ name: '8'
+ frequency_range:
+ central: 54.4
+ bandwidth: 0.400
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '9':
+ name: '9'
+ frequency_range:
+ central: 54.94
+ bandwidth: 0.400
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '10':
+ name: '10'
+ frequency_range:
+ central: 55.5
+ bandwidth: 0.330
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '11':
+ name: '11'
+ frequency_range:
+ central: 57.290344
+ bandwidth: 0.330
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '12':
+ #57.290344±0.217
+ name: '12'
+ frequency_double_sideband:
+ central: 57.290344
+ side: 0.217
+ bandwidth: 0.078
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '13':
+ #57.290344±0.3222±0.048
+ name: '13'
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.048
+ bandwidth: 0.036
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '14':
+ #57.290344±0.3222±0.022
+ name: '14'
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.022
+ bandwidth: 0.016
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '15':
+ #57.290344±0.3222±0.010
+ name: '15'
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.010
+ bandwidth: 0.008
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '16':
+ #57.290344±0.3222±0.0045
+ name: '16'
+ frequency_quadruple_sideband:
+ central: 57.290344
+ side: 0.3222
+ sideside: 0.0045
+ bandwidth: 0.004
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '17':
+ name: '17'
+ frequency_range:
+ central: 89.0
+ bandwidth: 4.0
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '18':
+ name: '18'
+    # FIXME! Is this a double side band or what? MWS-18; 164–167; 2 x 1350; QH
+ frequency_range:
+ central: 166.0
+ bandwidth: 2.700
+ unit: GHz
+ polarization: 'QH'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '19':
+ name: '19'
+ frequency_double_sideband:
+ central: 183.311
+ side: 7.0
+ bandwidth: 2.0
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '20':
+ name: '20'
+ frequency_double_sideband:
+ central: 183.311
+ side: 4.5
+ bandwidth: 2.0
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '21':
+ name: '21'
+ frequency_double_sideband:
+ central: 183.311
+ side: 3.0
+ bandwidth: 1.0
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '22':
+ name: '22'
+ frequency_double_sideband:
+ central: 183.311
+ side: 1.8
+ bandwidth: 1.0
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '23':
+ name: '23'
+ frequency_double_sideband:
+ central: 183.311
+ side: 1.0
+ bandwidth: 0.5
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+ '24':
+ name: '24'
+ frequency_range:
+      central: 229.0
+ bandwidth: 2.0
+ unit: GHz
+ polarization: 'QV'
+ resolution: 17000
+ calibration:
+ brightness_temperature:
+ standard_name: toa_brightness_temperature
+ coordinates:
+ - mws_lon
+ - mws_lat
+ file_type: mws_l1b_nc
+ file_key: data/calibration/mws_toa_brightness_temperature
+
+# --- Coordinates ---
+
+ mws_lat:
+ name: mws_lat
+ resolution: 17000
+ file_type: mws_l1b_nc
+ file_key: data/navigation/mws_lat
+ standard_name: latitude
+ units: degrees_north
+
+ mws_lon:
+ name: mws_lon
+ resolution: 17000
+ file_type: mws_l1b_nc
+    file_key: data/navigation/mws_lon
+ standard_name: longitude
+ units: degrees_east
+
+# --- Navigation data ---
+
+ solar_azimuth:
+ name: solar_azimuth
+ standard_name: solar_azimuth_angle
+ file_type: mws_l1b_nc
+ file_key: data/navigation/mws_solar_azimuth_angle
+ coordinates:
+ - mws_lon
+ - mws_lat
+ solar_zenith:
+ name: solar_zenith
+ standard_name: solar_zenith_angle
+ file_type: mws_l1b_nc
+ file_key: data/navigation/mws_solar_zenith_angle
+ coordinates:
+ - mws_lon
+ - mws_lat
+ satellite_azimuth:
+ name: satellite_azimuth
+ standard_name: satellite_azimuth_angle
+ file_type: mws_l1b_nc
+ file_key: data/navigation/mws_satellite_azimuth_angle
+ coordinates:
+ - mws_lon
+ - mws_lat
+ satellite_zenith:
+ name: satellite_zenith
+ standard_name: satellite_zenith_angle
+ file_type: mws_l1b_nc
+ file_key: data/navigation/mws_satellite_zenith_angle
+ coordinates:
+ - mws_lon
+ - mws_lat
+
+
+file_types:
+ mws_l1b_nc:
+ # EPS-SG_MWS-1B-RAD.nc
+ # W_XX-EUMETSAT-Darmstadt,SAT,SGA1-MWS-1B-RAD_C_EUMT_20210609095009_G_D_20070912084321_20070912102225_T_N____.nc
+ file_reader: !!python/name:satpy.readers.mws_l1b.MWSL1BFile
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{platform_shortname}-MWS-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_N____.nc']
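+
+# A minimal usage sketch in Python (file name taken from the example comment
+# above; channel names are the quoted dataset keys defined in this file):
+#   from satpy import Scene
+#   fname = "W_XX-EUMETSAT-Darmstadt,SAT,SGA1-MWS-1B-RAD_C_EUMT_20210609095009_G_D_20070912084321_20070912102225_T_N____.nc"
+#   scn = Scene(filenames=[fname], reader="mws_l1b_nc")
+#   scn.load(["19"])  # 183.311 +/- 7.0 GHz double-sideband channel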
diff --git a/satpy/etc/readers/nucaps.yaml b/satpy/etc/readers/nucaps.yaml
index 9ba9245780..a948357a04 100644
--- a/satpy/etc/readers/nucaps.yaml
+++ b/satpy/etc/readers/nucaps.yaml
@@ -1,8 +1,12 @@
reader:
- description: NUCAPS Retrieval Reader
name: nucaps
+ short_name: NUCAPS EDR
+ long_name: NUCAPS EDR Retrieval data in NetCDF4 format
+ description: NUCAPS Retrieval Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.nucaps.NUCAPSReader
- sensors: [cris, atms]
+ sensors: [cris, atms, viirs]
data_identification_keys:
name:
required: true
diff --git a/satpy/etc/readers/nwcsaf-geo.yaml b/satpy/etc/readers/nwcsaf-geo.yaml
index 00230f6150..29e3b5cc05 100644
--- a/satpy/etc/readers/nwcsaf-geo.yaml
+++ b/satpy/etc/readers/nwcsaf-geo.yaml
@@ -1,6 +1,10 @@
reader:
- description: NetCDF4 reader for the NWCSAF GEO 2016/2018 format
name: nwcsaf-geo
+ short_name: NWCSAF GEO
+ long_name: NWCSAF GEO 2016 products in netCDF4 format (limited to SEVIRI)
+ description: NetCDF4 reader for the NWCSAF GEO 2016/2018 format
+ status: Alpha
+ supports_fsspec: false
sensors: [seviri, abi, ahi]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -35,6 +39,10 @@ file_types:
file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
file_patterns: ['S_NWC_CRR_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']
+ nc_nwcsaf_crr-ph:
+ file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
+ file_patterns: ['S_NWC_CRR-Ph_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']
+
nc_nwcsaf_ishai:
file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
file_patterns: ['S_NWC_iSHAI_{platform_id}_{region_id}_{start_time:%Y%m%dT%H%M%S}Z.nc']
@@ -353,6 +361,48 @@ datasets:
resolution: 3000
file_type: nc_nwcsaf_crr
+# ---- CRR-Ph products ------------
+
+ crrph_intensity:
+ name: crrph_intensity
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_pal:
+ name: crrph_intensity_pal
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_accum:
+ name: crrph_accum
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_iqf:
+ name: crrph_iqf
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_iqf_pal:
+ name: crrph_iqf_pal
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_status_flag:
+ name: crrph_status
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_conditions:
+ name: crrph_conditions
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
+ crrph_quality:
+ name: crrph_quality
+ resolution: 3000
+ file_type: nc_nwcsaf_crr-ph
+
# ----iSHAI products ------------
ishai_tpw:
diff --git a/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml b/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml
index 3c431ad335..72b3998cdd 100644
--- a/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml
+++ b/satpy/etc/readers/nwcsaf-msg2013-hdf5.yaml
@@ -1,6 +1,10 @@
reader:
- description: HDF5 reader for the NWCSAF/Geo Seviri 2013 format
name: nwcsaf-msg2013-hdf5
+ short_name: NWCSAF Geo
+ long_name: NWCSAF GEO 2013 products in HDF5 format (limited to SEVIRI)
+ description: HDF5 reader for the NWCSAF/Geo Seviri 2013 format
+ status: Defunct
+ supports_fsspec: false
sensors: [seviri]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -187,4 +191,3 @@ datasets:
resolution: 3000
file_type: h5_nwcsaf_ctth
file_key: CTTH_QUALITY
-
diff --git a/satpy/etc/readers/nwcsaf-pps_nc.yaml b/satpy/etc/readers/nwcsaf-pps_nc.yaml
index 7a416972b5..dd12e1558b 100644
--- a/satpy/etc/readers/nwcsaf-pps_nc.yaml
+++ b/satpy/etc/readers/nwcsaf-pps_nc.yaml
@@ -1,6 +1,10 @@
reader:
- description: NetCDF4 reader for the NWCSAF/PPS 2014 format
name: nwcsaf-pps_nc
+ short_name: NWCSAF PPS
+ long_name: NWCSAF PPS 2014, 2018 products in netCDF4 format
+ description: NetCDF4 reader for the NWCSAF/PPS 2014 format
+  status: Alpha, only standard swath-based output supported (remapped netCDF and CPP products not supported yet)
+ supports_fsspec: false
sensors: ['avhrr-3', 'viirs', 'modis']
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -35,8 +39,12 @@ file_types:
nc_nwcsaf_cpp:
file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
file_patterns: ['S_NWC_CPP_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc']
+ file_key_prefix: cpp_
-
+ nc_nwcsaf_cmic:
+ file_reader: !!python/name:satpy.readers.nwcsaf_nc.NcNWCSAF
+ file_patterns: ['S_NWC_CMIC_{platform_id}_{orbit_number}_{start_time:%Y%m%dT%H%M%S%f}Z_{end_time:%Y%m%dT%H%M%S%f}Z.nc']
+ file_key_prefix: cmic_
datasets:
@@ -214,63 +222,93 @@ datasets:
file_type: nc_nwcsaf_ctth
-# ---- CPP products ------------
+# ---- CMIC products (was CPP in PPS<=2018) ------------
+
+ cmic_phase:
+ name: cmic_phase
+ file_key: phase
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
+ coordinates: [lon, lat]
+
+ cmic_phase_pal:
+ name: [cmic_phase_pal, cpp_phase_pal]
+ file_key: phase_pal
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
- cpp_phase:
- name: cpp_phase
- file_type: nc_nwcsaf_cpp
+ cmic_reff:
+ name: cmic_reff
+ file_key: [cre, reff]
+ file_type: [nc_nwcsaf_cmic, nc_nwcsaf_cpp]
coordinates: [lon, lat]
- cpp_phase_pal:
- name: cpp_phase_pal
- file_type: nc_nwcsaf_cpp
+ cmic_reff_pal:
+ name: [cmic_reff_pal, cmic_cre_pal, cpp_reff_pal]
+ file_key: [cre_pal, reff_pal]
+ scale_offset_dataset: [reff, cre]
+ file_type: [nc_nwcsaf_cmic, nc_nwcsaf_cpp]
- cpp_reff:
- name: cpp_reff
- file_type: nc_nwcsaf_cpp
+ cmic_cot:
+ name: cmic_cot
+ file_key: cot
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
coordinates: [lon, lat]
- cpp_reff_pal:
- name: cpp_reff_pal
- scale_offset_dataset: cpp_reff
- file_type: nc_nwcsaf_cpp
+ cmic_cot_pal:
+ name: [cmic_cot_pal, cpp_cot_pal]
+ file_key: cot_pal
+ scale_offset_dataset: cot
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
- cpp_cot:
- name: cpp_cot
- file_type: nc_nwcsaf_cpp
+ cmic_cwp:
+ name: cmic_cwp
+ file_key: cwp
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
coordinates: [lon, lat]
- cpp_cot_pal:
- name: cpp_cot_pal
- scale_offset_dataset: cpp_cot
- file_type: nc_nwcsaf_cpp
+ cmic_cwp_pal:
+ name: [cmic_cwp_pal, cpp_cwp_pal]
+ file_key: cwp_pal
+ scale_offset_dataset: cwp
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
- cpp_cwp:
- name: cpp_cwp
- file_type: nc_nwcsaf_cpp
+ cmic_iwp:
+ name: cmic_iwp
+ file_key: iwp
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
coordinates: [lon, lat]
- cpp_cwp_pal:
- name: cpp_cwp_pal
- scale_offset_dataset: cpp_cwp
- file_type: nc_nwcsaf_cpp
+ cmic_iwp_pal:
+ name: [cmic_iwp_pal, cpp_iwp_pal]
+ file_key: iwp_pal
+ scale_offset_dataset: iwp
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
- cpp_iwp:
- name: cpp_iwp
- file_type: nc_nwcsaf_cpp
+ cmic_lwp:
+ name: cmic_lwp
+ file_key: lwp
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
coordinates: [lon, lat]
- cpp_iwp_pal:
- name: cpp_iwp_pal
- scale_offset_dataset: cpp_iwp
- file_type: nc_nwcsaf_cpp
+ cmic_lwp_pal:
+ name: [cmic_lwp_pal, cpp_lwp_pal]
+ file_key: lwp_pal
+ scale_offset_dataset: lwp
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
- cpp_lwp:
- name: cpp_lwp
- file_type: nc_nwcsaf_cpp
+ cmic_status_flag:
+ name: [cmic_status_flag, cpp_status_flag]
+ file_key: status_flag
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
coordinates: [lon, lat]
- cpp_lwp_pal:
- name: cpp_lwp_pal
- scale_offset_dataset: cpp_lwp
- file_type: nc_nwcsaf_cpp
+ cmic_conditions:
+ name: [cmic_conditions, cpp_conditions]
+ file_key: conditions
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
+ coordinates: [lon, lat]
+
+ cmic_quality:
+ name: [cmic_quality, cpp_quality]
+ file_key: quality
+ file_type: [nc_nwcsaf_cpp, nc_nwcsaf_cmic]
+ coordinates: [lon, lat]
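+
+# Note: the list-valued `name` entries above keep the legacy cpp_* aliases,
+# so e.g. (sketch) Scene(..., reader='nwcsaf-pps_nc').load(['cpp_cwp_pal'])
+# and .load(['cmic_cwp_pal']) should resolve to the same palette dataset.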
diff --git a/satpy/etc/readers/oceancolorcci_l3_nc.yaml b/satpy/etc/readers/oceancolorcci_l3_nc.yaml
new file mode 100644
index 0000000000..7c6d1732ce
--- /dev/null
+++ b/satpy/etc/readers/oceancolorcci_l3_nc.yaml
@@ -0,0 +1,826 @@
+reader:
+ name: oceancolorcci_l3_nc
+ short_name: OCCCI Level 3
+  long_name: ESA Ocean Colour CCI Level 3S data in NetCDF4 format
+  description: NetCDF reader for ESA Ocean Colour CCI data
+ status: Nominal
+ supports_fsspec: false
+ default_channels: []
+ sensors: [merged]
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+
+file_types:
+ occci_allprods_geo:
+ file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler
+ file_patterns: ['ESACCI-OC-{processing_level:3s}-OC_PRODUCTS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc',
+ 'ESACCI-OC-{processing_level:3s}-OC_PRODUCTS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc']
+ occci_chlorprods_geo:
+ file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler
+ file_patterns: ['ESACCI-OC-{processing_level:3s}-CHLOR_A-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc',
+ 'ESACCI-OC-{processing_level:3s}-CHLOR_A-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc']
+ occci_iopprods_geo:
+ file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler
+ file_patterns: ['ESACCI-OC-{processing_level:3s}-IOP-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc',
+ 'ESACCI-OC-{processing_level:3s}-IOP-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc']
+ occci_k490prods_geo:
+ file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler
+ file_patterns: ['ESACCI-OC-{processing_level:3s}-K_490-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc',
+ 'ESACCI-OC-{processing_level:3s}-K_490-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc']
+ occci_rrsprods_geo:
+ file_reader: !!python/name:satpy.readers.oceancolorcci_l3_nc.OCCCIFileHandler
+ file_patterns: ['ESACCI-OC-{processing_level:3s}-RRS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m}-fv{version}.nc',
+                     'ESACCI-OC-{processing_level:3s}-RRS-{sensors}-{composite_period_1}_{composite_period_2}_{spatial_res}_GEO_PML_{algorithm}-{start_time:%Y%m%d}-fv{version}.nc']
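+
+# A minimal usage sketch in Python (hypothetical file name matching the
+# occci_iopprods_geo pattern above):
+#   from satpy import Scene
+#   scn = Scene(filenames=["ESACCI-OC-L3S-IOP-MERGED-1M_MONTHLY_4km_GEO_PML_QAA-200101-fv5.0.nc"],
+#               reader="oceancolorcci_l3_nc")
+#   scn.load(["adg_412"])
+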
+datasets:
+ longitude:
+ name: longitude
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ standard_name: longitude
+ units: degree
+ nc_key: 'lon'
+
+ latitude:
+ name: latitude
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ standard_name: latitude
+ units: degree
+ nc_key: 'lat'
+
+ adg_412:
+ name: adg_412
+ wavelength: 0.412
+ standard_name: Absorption coefficient for dissolved and detrital material at 412 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_412'
+
+ adg_412_bias:
+ name: adg_412_bias
+ wavelength: 0.412
+ standard_name: Bias of absorption coefficient for dissolved and detrital material at 412 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_412_bias'
+
+ adg_412_rmsd:
+ name: adg_412_rmsd
+ wavelength: 0.412
+ standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 412 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_412_rmsd'
+
+ adg_443:
+ name: adg_443
+ wavelength: 0.443
+ standard_name: Absorption coefficient for dissolved and detrital material at 443 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_443'
+
+ adg_443_bias:
+ name: adg_443_bias
+ wavelength: 0.443
+ standard_name: Bias of absorption coefficient for dissolved and detrital material at 443 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_443_bias'
+
+ adg_443_rmsd:
+ name: adg_443_rmsd
+ wavelength: 0.443
+ standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 443 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_443_rmsd'
+
+ adg_490:
+ name: adg_490
+ wavelength: 0.490
+ standard_name: Absorption coefficient for dissolved and detrital material at 490 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_490'
+
+ adg_490_bias:
+ name: adg_490_bias
+ wavelength: 0.490
+ standard_name: Bias of absorption coefficient for dissolved and detrital material at 490 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_490_bias'
+
+ adg_490_rmsd:
+ name: adg_490_rmsd
+ wavelength: 0.490
+ standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 490 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_490_rmsd'
+
+ adg_510:
+ name: adg_510
+ wavelength: 0.510
+ standard_name: Absorption coefficient for dissolved and detrital material at 510 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_510'
+
+ adg_510_bias:
+ name: adg_510_bias
+ wavelength: 0.510
+ standard_name: Bias of absorption coefficient for dissolved and detrital material at 510 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_510_bias'
+
+ adg_510_rmsd:
+ name: adg_510_rmsd
+ wavelength: 0.510
+ standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 510 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_510_rmsd'
+
+ adg_560:
+ name: adg_560
+ wavelength: 0.560
+ standard_name: Absorption coefficient for dissolved and detrital material at 560 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_560'
+
+ adg_560_bias:
+ name: adg_560_bias
+ wavelength: 0.560
+ standard_name: Bias of absorption coefficient for dissolved and detrital material at 560 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_560_bias'
+
+ adg_560_rmsd:
+ name: adg_560_rmsd
+ wavelength: 0.560
+ standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 560 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_560_rmsd'
+
+ adg_665:
+ name: adg_665
+ wavelength: 0.665
+ standard_name: Absorption coefficient for dissolved and detrital material at 665 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_665'
+
+ adg_665_bias:
+ name: adg_665_bias
+ wavelength: 0.665
+ standard_name: Bias of absorption coefficient for dissolved and detrital material at 665 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_665_bias'
+
+ adg_665_rmsd:
+ name: adg_665_rmsd
+ wavelength: 0.665
+ standard_name: Root-mean-square-difference of absorption coefficient for dissolved and detrital material at 665 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'adg_665_rmsd'
+
+ aph_412:
+ name: aph_412
+ wavelength: 0.412
+ standard_name: Phytoplankton absorption coefficient at 412 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_412'
+
+ aph_412_bias:
+ name: aph_412_bias
+ wavelength: 0.412
+    standard_name: Bias of phytoplankton absorption coefficient at 412 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_412_bias'
+
+ aph_412_rmsd:
+ name: aph_412_rmsd
+ wavelength: 0.412
+    standard_name: Root-mean-square-difference of phytoplankton absorption coefficient at 412 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_412_rmsd'
+
+ aph_443:
+ name: aph_443
+ wavelength: 0.443
+ standard_name: Phytoplankton absorption coefficient at 443 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_443'
+
+ aph_443_bias:
+ name: aph_443_bias
+ wavelength: 0.443
+    standard_name: Bias of phytoplankton absorption coefficient at 443 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_443_bias'
+
+ aph_443_rmsd:
+ name: aph_443_rmsd
+ wavelength: 0.443
+    standard_name: Root-mean-square-difference of phytoplankton absorption coefficient at 443 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_443_rmsd'
+
+ aph_490:
+ name: aph_490
+ wavelength: 0.490
+ standard_name: Phytoplankton absorption coefficient at 490 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_490'
+
+ aph_490_bias:
+ name: aph_490_bias
+ wavelength: 0.490
+    standard_name: Bias of phytoplankton absorption coefficient at 490 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_490_bias'
+
+ aph_490_rmsd:
+ name: aph_490_rmsd
+ wavelength: 0.490
+    standard_name: Root-mean-square-difference of phytoplankton absorption coefficient at 490 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_490_rmsd'
+
+ aph_510:
+ name: aph_510
+ wavelength: 0.510
+ standard_name: Phytoplankton absorption coefficient at 510 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_510'
+
+ aph_510_bias:
+ name: aph_510_bias
+ wavelength: 0.510
+    standard_name: Bias of phytoplankton absorption coefficient at 510 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_510_bias'
+
+ aph_510_rmsd:
+ name: aph_510_rmsd
+ wavelength: 0.510
+    standard_name: Root-mean-square-difference of phytoplankton absorption coefficient at 510 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_510_rmsd'
+
+ aph_560:
+ name: aph_560
+ wavelength: 0.560
+ standard_name: Phytoplankton absorption coefficient at 560 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_560'
+
+ aph_560_bias:
+ name: aph_560_bias
+ wavelength: 0.560
+    standard_name: Bias of phytoplankton absorption coefficient at 560 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_560_bias'
+
+ aph_560_rmsd:
+ name: aph_560_rmsd
+ wavelength: 0.560
+    standard_name: Root-mean-square-difference of phytoplankton absorption coefficient at 560 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_560_rmsd'
+
+ aph_665:
+ name: aph_665
+ wavelength: 0.665
+ standard_name: Phytoplankton absorption coefficient at 665 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_665'
+
+ aph_665_bias:
+ name: aph_665_bias
+ wavelength: 0.665
+    standard_name: Bias of phytoplankton absorption coefficient at 665 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_665_bias'
+
+ aph_665_rmsd:
+ name: aph_665_rmsd
+ wavelength: 0.665
+    standard_name: Root-mean-square-difference of phytoplankton absorption coefficient at 665 nm.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'aph_665_rmsd'
+
+ atot_412:
+ name: atot_412
+ wavelength: 0.412
+ standard_name: Total absorption coefficient at 412 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'atot_412'
+
+ atot_443:
+ name: atot_443
+ wavelength: 0.443
+ standard_name: Total absorption coefficient at 443 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'atot_443'
+
+ atot_490:
+ name: atot_490
+ wavelength: 0.490
+ standard_name: Total absorption coefficient at 490 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'atot_490'
+
+ atot_510:
+ name: atot_510
+ wavelength: 0.510
+ standard_name: Total absorption coefficient at 510 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'atot_510'
+
+ atot_560:
+ name: atot_560
+ wavelength: 0.560
+ standard_name: Total absorption coefficient at 560 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'atot_560'
+
+ atot_665:
+ name: atot_665
+ wavelength: 0.665
+ standard_name: Total absorption coefficient at 665 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [longitude, latitude]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'atot_665'
+
+ rrs_412:
+ name: rrs_412
+ wavelength: 0.412
+ standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 412 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_412'
+
+ rrs_412_bias:
+ name: rrs_412_bias
+ wavelength: 0.412
+    standard_name: Bias of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 412 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_412_bias'
+
+ rrs_412_rmsd:
+ name: rrs_412_rmsd
+ wavelength: 0.412
+    standard_name: Root-mean-square-difference of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 412 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_412_rmsd'
+
+ rrs_443:
+ name: rrs_443
+ wavelength: 0.443
+ standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 443 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_443'
+
+ rrs_443_bias:
+ name: rrs_443_bias
+ wavelength: 0.443
+    standard_name: Bias of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 443 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_443_bias'
+
+ rrs_443_rmsd:
+ name: rrs_443_rmsd
+ wavelength: 0.443
+    standard_name: Root-mean-square-difference of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 443 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_443_rmsd'
+
+ rrs_490:
+ name: rrs_490
+ wavelength: 0.490
+ standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 490 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_490'
+
+ rrs_490_bias:
+ name: rrs_490_bias
+ wavelength: 0.490
+    standard_name: Bias of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 490 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_490_bias'
+
+ rrs_490_rmsd:
+ name: rrs_490_rmsd
+ wavelength: 0.490
+    standard_name: Root-mean-square-difference of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 490 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_490_rmsd'
+
+ rrs_510:
+ name: rrs_510
+ wavelength: 0.510
+ standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 510 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_510'
+
+ rrs_510_bias:
+ name: rrs_510_bias
+ wavelength: 0.510
+    standard_name: Bias of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 510 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_510_bias'
+
+ rrs_510_rmsd:
+ name: rrs_510_rmsd
+ wavelength: 0.510
+    standard_name: Root-mean-square-difference of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 510 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_510_rmsd'
+
+ rrs_560:
+ name: rrs_560
+ wavelength: 0.560
+ standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 560 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_560'
+
+ rrs_560_bias:
+ name: rrs_560_bias
+ wavelength: 0.560
+    standard_name: Bias of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 560 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_560_bias'
+
+ rrs_560_rmsd:
+ name: rrs_560_rmsd
+ wavelength: 0.560
+    standard_name: Root-mean-square-difference of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 560 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_560_rmsd'
+
+ rrs_665:
+ name: rrs_665
+ wavelength: 0.665
+ standard_name: Sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 665 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_665'
+
+ rrs_665_bias:
+ name: rrs_665_bias
+ wavelength: 0.665
+    standard_name: Bias of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 665 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_665_bias'
+
+ rrs_665_rmsd:
+ name: rrs_665_rmsd
+ wavelength: 0.665
+    standard_name: Root-mean-square-difference of sea surface reflectance defined as the ratio of water-leaving radiance to surface irradiance at 665 nm.
+ units: "sr-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_rrsprods_geo]
+ nc_key: 'Rrs_665_rmsd'
+
+ bbp_412:
+ name: bbp_412
+ wavelength: 0.412
+    standard_name: Particulate backscattering coefficient at 412 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'bbp_412'
+
+ bbp_443:
+ name: bbp_443
+ wavelength: 0.443
+    standard_name: Particulate backscattering coefficient at 443 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'bbp_443'
+
+ bbp_490:
+ name: bbp_490
+ wavelength: 0.490
+    standard_name: Particulate backscattering coefficient at 490 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'bbp_490'
+
+ bbp_510:
+ name: bbp_510
+ wavelength: 0.510
+    standard_name: Particulate backscattering coefficient at 510 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'bbp_510'
+
+ bbp_560:
+ name: bbp_560
+ wavelength: 0.560
+    standard_name: Particulate backscattering coefficient at 560 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'bbp_560'
+
+ bbp_665:
+ name: bbp_665
+ wavelength: 0.665
+    standard_name: Particulate backscattering coefficient at 665 nm as derived using the QAA model.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_iopprods_geo]
+ nc_key: 'bbp_665'
+
+ chlor_a:
+ name: chlor_a
+    standard_name: Chlorophyll-a concentration in seawater (not log-transformed), generated as a blended combination of the OCI, OCI2, OC2 and OCx algorithms, depending on water class memberships.
+ units: "milligram m-3"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo]
+ nc_key: 'chlor_a'
+
+ chlor_a_log10_bias:
+ name: chlor_a_log10_bias
+ standard_name: Bias of log10-transformed chlorophyll-a concentration in seawater.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo]
+ nc_key: 'chlor_a_log10_bias'
+
+ chlor_a_log10_rmsd:
+ name: chlor_a_log10_rmsd
+ standard_name: Root-mean-square-difference of log10-transformed chlorophyll-a concentration in seawater.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo]
+ nc_key: 'chlor_a_log10_rmsd'
+
+ kd_490:
+ name: kd_490
+    standard_name: Downwelling attenuation coefficient at 490 nm, derived using the Lee 2005 equation and bbw from Zhang 2009 (following the SeaDAS Kd_lee algorithm).
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_k490prods_geo]
+ nc_key: 'kd_490'
+
+ kd_490_bias:
+ name: kd_490_bias
+    standard_name: Bias of downwelling attenuation coefficient at 490 nm derived using the Lee 2005 equation and bbw from Zhang 2009.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_k490prods_geo]
+ nc_key: 'kd_490_bias'
+
+ kd_490_rmsd:
+ name: kd_490_rmsd
+    standard_name: Root-mean-square-difference of downwelling attenuation coefficient at 490 nm derived using the Lee 2005 equation and bbw from Zhang 2009.
+ units: "m-1"
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_k490prods_geo]
+ nc_key: 'kd_490_rmsd'
+
+ meris_nobs_sum:
+ name: meris_nobs_sum
+ standard_name: Count of the number of observations from the MERIS sensor contributing to this bin cell.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'MERIS_nobs_sum'
+
+ modis_nobs_sum:
+ name: modis_nobs_sum
+ standard_name: Count of the number of observations from the MODIS (Aqua) sensor contributing to this bin cell.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'MODISA_nobs_sum'
+
+ olci_nobs_sum:
+ name: olci_nobs_sum
+ standard_name: Count of the number of observations from the OLCI sensor contributing to this bin cell.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'OLCI_nobs_sum'
+
+ seawifs_nobs_sum:
+ name: seawifs_nobs_sum
+ standard_name: Count of the number of observations from the SeaWiFS (GAC and LAC) sensor contributing to this bin cell.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'SeaWiFS_nobs_sum'
+
+ viirs_nobs_sum:
+ name: viirs_nobs_sum
+ standard_name: Count of the number of observations from the VIIRS sensor contributing to this bin cell.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'VIIRS_nobs_sum'
+
+ total_nobs_sum:
+ name: total_nobs_sum
+ standard_name: Count of the total number of observations contributing to this bin cell.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'total_nobs_sum'
+
+ water_class1:
+ name: water_class1
+ standard_name: Mean of normalised water class 1 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class1'
+
+ water_class2:
+ name: water_class2
+ standard_name: Mean of normalised water class 2 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class2'
+
+ water_class3:
+ name: water_class3
+ standard_name: Mean of normalised water class 3 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class3'
+
+ water_class4:
+ name: water_class4
+ standard_name: Mean of normalised water class 4 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class4'
+
+ water_class5:
+ name: water_class5
+ standard_name: Mean of normalised water class 5 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class5'
+
+ water_class6:
+ name: water_class6
+ standard_name: Mean of normalised water class 6 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class6'
+
+ water_class7:
+ name: water_class7
+ standard_name: Mean of normalised water class 7 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class7'
+
+ water_class8:
+ name: water_class8
+ standard_name: Mean of normalised water class 8 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class8'
+
+ water_class9:
+ name: water_class9
+ standard_name: Mean of normalised water class 9 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class9'
+
+ water_class10:
+ name: water_class10
+ standard_name: Mean of normalised water class 10 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class10'
+
+ water_class11:
+ name: water_class11
+ standard_name: Mean of normalised water class 11 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class11'
+
+ water_class12:
+ name: water_class12
+ standard_name: Mean of normalised water class 12 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class12'
+
+ water_class13:
+ name: water_class13
+ standard_name: Mean of normalised water class 13 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class13'
+
+ water_class14:
+ name: water_class14
+ standard_name: Mean of normalised water class 14 membership over the compositing period.
+ coordinates: [ longitude, latitude ]
+ file_type: [occci_allprods_geo, occci_chlorprods_geo, occci_iopprods_geo, occci_k490prods_geo, occci_rrsprods_geo]
+ nc_key: 'water_class14'
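Every dataset entry above follows the same shape: a Satpy dataset name, the `nc_key` it maps to in the netCDF file, and the `file_type` list naming which product files carry it. A minimal usage sketch, assuming the reader is registered under the `name:` from this YAML's `reader:` section (not visible in this hunk, so `oc_cci_l3_nc` below is a placeholder) and that a matching L3 file is on disk:

```python
# Hedged sketch: load two of the datasets defined above. The reader name and
# the file glob are placeholders; substitute the real ones for your files.
from glob import glob

from satpy import Scene

scn = Scene(reader="oc_cci_l3_nc",  # placeholder for this YAML's reader name
            filenames=glob("ESACCI-OC-L3S-*.nc"))
scn.load(["chlor_a", "rrs_443"])  # resolved via nc_key 'chlor_a' and 'Rrs_443'
print(scn["chlor_a"].attrs["units"])  # "milligram m-3", per the entry above
```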
diff --git a/satpy/etc/readers/olci_l1b.yaml b/satpy/etc/readers/olci_l1b.yaml
index 46d6767596..7e7cacff83 100644
--- a/satpy/etc/readers/olci_l1b.yaml
+++ b/satpy/etc/readers/olci_l1b.yaml
@@ -1,6 +1,10 @@
reader:
- description: NC Reader for OLCI data
name: olci_l1b
+ short_name: OLCI Level 1b
+ long_name: Sentinel-3 A and B OLCI Level 1B data in netCDF4 format
+ description: NC Reader for OLCI data
+ status: Nominal
+ supports_fsspec: true
sensors: [olci]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml
index 259c1e307d..eff0c8f94d 100644
--- a/satpy/etc/readers/olci_l2.yaml
+++ b/satpy/etc/readers/olci_l2.yaml
@@ -1,6 +1,10 @@
reader:
- description: NC Reader for OLCI data
name: olci_l2
+ short_name: OLCI Level 2
+ long_name: Sentinel-3 A and B OLCI Level 2 data in netCDF4 format
+ description: NC Reader for OLCI data
+ status: Nominal
+ supports_fsspec: true
sensors: [olci]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
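Both OLCI readers now advertise `supports_fsspec: true`. A sketch of what that enables, assuming the granules are reachable through any fsspec filesystem (the zip-over-local URL below is purely illustrative):

```python
# Hedged sketch: feed fsspec-opened OLCI files to a Scene via FSFile.
import fsspec

from satpy import Scene
from satpy.readers import FSFile

open_files = fsspec.open_files(
    "zip://*/*.nc::file:///tmp/S3A_OL_1_EFR_example.zip")  # illustrative URL
scn = Scene(reader="olci_l1b", filenames=[FSFile(f) for f in open_files])
scn.load(["Oa08"])  # one of the OLCI radiance channels
```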
diff --git a/satpy/etc/readers/omps_edr.yaml b/satpy/etc/readers/omps_edr.yaml
index d872a966db..61bda5165c 100644
--- a/satpy/etc/readers/omps_edr.yaml
+++ b/satpy/etc/readers/omps_edr.yaml
@@ -1,6 +1,10 @@
reader:
- description: Generic OMPS EDR reader
name: omps_edr
+ short_name: OMPS EDR
+ long_name: OMPS EDR data in HDF5 format
+ description: Generic OMPS EDR reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [omps]
@@ -240,4 +244,3 @@ datasets:
coordinates: [longitude_sampo, latitude_sampo]
file_type: omps_sampo
file_key: SCIENCE_DATA/CloudFraction
-
diff --git a/satpy/etc/readers/safe_sar_l2_ocn.yaml b/satpy/etc/readers/safe_sar_l2_ocn.yaml
index 24db9a2586..19a350cc1a 100644
--- a/satpy/etc/readers/safe_sar_l2_ocn.yaml
+++ b/satpy/etc/readers/safe_sar_l2_ocn.yaml
@@ -1,6 +1,10 @@
reader:
- description: SAFE Reader for SAR L2 OCN data
name: safe_sar_l2_ocn
+  short_name: SAR L2 OCN
+ long_name: SAR Level 2 OCN data in SAFE format
+ description: SAFE Reader for SAR L2 OCN data
+ status: Defunct
+ supports_fsspec: false
sensors: [sar-c]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/sar-c_safe.yaml b/satpy/etc/readers/sar-c_safe.yaml
index f3569d710b..4e45ca8584 100644
--- a/satpy/etc/readers/sar-c_safe.yaml
+++ b/satpy/etc/readers/sar-c_safe.yaml
@@ -1,6 +1,10 @@
reader:
- description: SAFE Reader for SAR-C data
name: sar-c_safe
+ short_name: SAR-C
+ long_name: Sentinel-1 A and B SAR-C data in SAFE format
+ description: SAFE Reader for SAR-C data
+ status: Nominal
+ supports_fsspec: false
sensors: [sar-c]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -40,15 +44,15 @@ file_types:
file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/measurement/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.tiff']
requires: [safe_calibration, safe_noise, safe_annotation]
safe_calibration:
- file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXML
+ file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLCalibration
file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/calibration-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml']
requires: [safe_annotation]
safe_noise:
- file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXML
+ file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLNoise
file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/calibration/noise-{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml']
requires: [safe_annotation]
safe_annotation:
- file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXML
+ file_reader: !!python/name:satpy.readers.sar_c_safe.SAFEXMLAnnotation
file_patterns: ['{fmission_id:3s}_{fsar_mode:2s}_{fproduct_type:3s}{fresolution:1s}_{fprocessing_level:1s}{fproduct_class:1s}{fpolarization:2s}_{fstart_time:%Y%m%dT%H%M%S}_{fend_time:%Y%m%dT%H%M%S}_{forbit_number:6d}_{fmission_data_take_id:6s}_{fproduct_unique_id:4s}.SAFE/annotation/{mission_id:3s}-{swath_id:2s}-{product_type:3s}-{polarization:2s}-{start_time:%Y%m%dt%H%M%S}-{end_time:%Y%m%dt%H%M%S}-{orbit_number:6d}-{mission_data_take_id:6s}-{image_number:3s}.xml']
@@ -148,3 +152,10 @@ datasets:
file_type: safe_annotation
xml_item: geolocationGridPoint
xml_tag: incidenceAngle
+
+ calibration_constant:
+ name: calibration_constant
+ sensor: sar-c
+ polarization: [hh, hv, vv, vh]
+ units: 1
+ file_type: safe_calibration
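With the XML handling split into dedicated calibration, noise and annotation handlers, the new `calibration_constant` dataset can be requested like any other. A sketch, with an illustrative SAFE directory name and a `DataQuery` to pick a single polarization:

```python
# Hedged sketch: request the new calibration_constant dataset for VV.
from glob import glob

from satpy import DataQuery, Scene

files = glob("S1A_IW_GRDH_1SDV_example.SAFE/**/*.*", recursive=True)  # example
scn = Scene(reader="sar-c_safe", filenames=files)
scn.load([DataQuery(name="calibration_constant", polarization="vv")])
```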
diff --git a/satpy/etc/readers/satpy_cf_nc.yaml b/satpy/etc/readers/satpy_cf_nc.yaml
index d522da13cb..bd968eb474 100644
--- a/satpy/etc/readers/satpy_cf_nc.yaml
+++ b/satpy/etc/readers/satpy_cf_nc.yaml
@@ -1,6 +1,10 @@
reader:
name: satpy_cf_nc
+ short_name: Satpy CF
+  long_name: Reader for CF-conformant netCDF files written with Satpy
description: Reader for Satpy's NC/CF files
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [many]
default_channels: []
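The `status` and `supports_fsspec` fields added to every `reader:` section in this changeset are machine-readable. A sketch of inspecting them, assuming `available_readers(as_dict=True)` returns the parsed metadata from each reader YAML:

```python
# Hedged sketch: print maturity and fsspec support for installed readers.
from satpy import available_readers

for info in available_readers(as_dict=True):
    print(f"{info['name']:<25} status={info.get('status', '?'):<10} "
          f"fsspec={info.get('supports_fsspec', '?')}")
```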
diff --git a/satpy/etc/readers/scatsat1_l2b.yaml b/satpy/etc/readers/scatsat1_l2b.yaml
index bacd61eae1..9e37cbf211 100644
--- a/satpy/etc/readers/scatsat1_l2b.yaml
+++ b/satpy/etc/readers/scatsat1_l2b.yaml
@@ -1,7 +1,11 @@
reader:
- description: Generic Eumetsat Scatsat-1 L2B Wind field Reader
name: scatsat1_l2b
+  short_name: Scatsat-1 L2B
+  long_name: Scatsat-1 Level 2B wind field data in HDF5 format
+ description: Generic Eumetsat Scatsat-1 L2B Wind field Reader
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+  status: Defunct
+ supports_fsspec: false
sensors: [scatterometer]
default_datasets:
diff --git a/satpy/etc/readers/seadas_l2.yaml b/satpy/etc/readers/seadas_l2.yaml
new file mode 100644
index 0000000000..99ff897bac
--- /dev/null
+++ b/satpy/etc/readers/seadas_l2.yaml
@@ -0,0 +1,67 @@
+reader:
+ name: seadas_l2
+ short_name: MODIS/VIIRS SEADAS
+  long_name: SEADAS L2 Chlorophyll A product in HDF4 or NetCDF format
+ description: MODIS and VIIRS SEADAS Reader
+ status: Beta
+ supports_fsspec: false
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ sensors: [modis, viirs]
+
+file_types:
+ chlora_seadas:
+ file_patterns:
+ # IMAPP-style filenames:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.seadas.hdf'
+ file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler
+ geo_resolution: 1000
+ chlora_seadas_nc:
+ file_patterns:
+ # IMAPP-style filenames:
+ - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.seadas.nc'
+ file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2NetCDFFileHandler
+ geo_resolution: 1000
+ chlora_seadas_viirs:
+ # SEADAS_npp_d20211118_t1728125_e1739327.hdf
+ file_patterns:
+ - 'SEADAS_{platform_indicator:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}.hdf'
+ file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2HDFFileHandler
+ geo_resolution: 750
+ chlora_seadas_viirs_nc:
+ # SEADAS_npp_d20211118_t1728125_e1739327.nc
+ file_patterns:
+ - 'SEADAS_{platform_indicator:s}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}.nc'
+ file_reader: !!python/name:satpy.readers.seadas_l2.SEADASL2NetCDFFileHandler
+ geo_resolution: 750
+
+datasets:
+ longitude:
+ name: longitude
+    file_type: [chlora_seadas_viirs_nc, chlora_seadas_nc, chlora_seadas_viirs, chlora_seadas]
+ file_key: ["navigation_data/longitude", "longitude"]
+ resolution:
+ 1000:
+ file_type: [chlora_seadas_nc, chlora_seadas]
+ 750:
+ file_type: [chlora_seadas_viirs_nc, chlora_seadas_viirs]
+
+ latitude:
+ name: latitude
+    file_type: [chlora_seadas_viirs_nc, chlora_seadas_nc, chlora_seadas_viirs, chlora_seadas]
+ file_key: ["navigation_data/latitude", "latitude"]
+ resolution:
+ 1000:
+ file_type: [chlora_seadas_nc, chlora_seadas]
+ 750:
+ file_type: [chlora_seadas_viirs_nc, chlora_seadas_viirs]
+
+ chlor_a:
+ name: chlor_a
+    file_type: [chlora_seadas_viirs_nc, chlora_seadas_nc, chlora_seadas_viirs, chlora_seadas]
+ file_key: ["geophysical_data/chlor_a", "chlor_a"]
+ resolution:
+ 1000:
+ file_type: [chlora_seadas_nc, chlora_seadas]
+ 750:
+ file_type: [chlora_seadas_viirs_nc, chlora_seadas_viirs]
+ coordinates: [longitude, latitude]
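The four file types let the same three datasets resolve to HDF4 or NetCDF inputs at 1000 m (MODIS) or 750 m (VIIRS). A sketch using the VIIRS-style file name quoted in the comment above (illustrative only):

```python
# Hedged sketch: load SEADAS chlorophyll from a VIIRS-style granule.
from satpy import Scene

scn = Scene(reader="seadas_l2",
            filenames=["SEADAS_npp_d20211118_t1728125_e1739327.hdf"])
scn.load(["chlor_a"])  # 750 m data, georeferenced via longitude/latitude
```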
diff --git a/satpy/etc/readers/seviri_l1b_hrit.yaml b/satpy/etc/readers/seviri_l1b_hrit.yaml
index 5aa7582e62..5ece4b8992 100644
--- a/satpy/etc/readers/seviri_l1b_hrit.yaml
+++ b/satpy/etc/readers/seviri_l1b_hrit.yaml
@@ -8,7 +8,9 @@ reader:
short_name: SEVIRI L1b HRIT
long_name: MSG SEVIRI Level 1b (HRIT)
description: >
- HRIT reader for EUMETSAT MSG SEVIRI Level 1b files.
+ HRIT reader for EUMETSAT MSG (Meteosat 8 to 11) SEVIRI Level 1b files.
+ status: Nominal
+ supports_fsspec: true
sensors: [seviri]
default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
reader: !!python/name:satpy.readers.yaml_reader.GEOSegmentYAMLReader
@@ -16,73 +18,73 @@ reader:
file_types:
HRIT_HRV:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-HRV______-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 24
HRIT_IR_016:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_016___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_IR_039:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_039___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_IR_087:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_087___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_IR_097:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_097___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_IR_108:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_108___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_IR_120:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_120___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_IR_134:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-IR_134___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_VIS006:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS006___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_VIS008:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-VIS008___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_WV_062:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_062___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
HRIT_WV_073:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-WV_073___-{segment:06d}___-{start_time:%Y%m%d%H%M}-__.bz2']
requires: [HRIT_PRO, HRIT_EPI]
expected_segments: 8
@@ -160,11 +162,11 @@ file_types:
HRIT_PRO:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGPrologueFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-PRO______-{start_time:%Y%m%d%H%M}-__.bz2']
HRIT_EPI:
file_reader: !!python/name:satpy.readers.seviri_l1b_hrit.HRITMSGEpilogueFileHandler
- file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__']
+ file_patterns: ['{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__', '{rate:1s}-000-{hrit_format:_<6s}-{platform_shortname:4s}_{service:_<7s}-_________-EPI______-{start_time:%Y%m%d%H%M}-__.bz2']
datasets:
HRV:
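Every file type above gains a second pattern ending in `.bz2`, so bzip2-compressed HRIT segments now match directly. A sketch (the glob is illustrative and matches both plain and `.bz2` segment names):

```python
# Hedged sketch: pass compressed and uncompressed HRIT segments together.
from glob import glob

from satpy import Scene

files = glob("H-000-MSG4__-MSG4________-*-__*")  # plain and .bz2 names
scn = Scene(reader="seviri_l1b_hrit", filenames=files)
scn.load(["IR_108"])
```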
diff --git a/satpy/etc/readers/seviri_l1b_icare.yaml b/satpy/etc/readers/seviri_l1b_icare.yaml
index ee09a116be..b3565167ec 100644
--- a/satpy/etc/readers/seviri_l1b_icare.yaml
+++ b/satpy/etc/readers/seviri_l1b_icare.yaml
@@ -9,6 +9,8 @@ reader:
long_name: MSG SEVIRI Level 1b in HDF format from ICARE (Lille)
description: >
A reader for L1b SEVIRI data that has been retrieved from the ICARE service as HDF.
+ status: Defunct
+ supports_fsspec: false
sensors: [seviri]
default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/seviri_l1b_native.yaml b/satpy/etc/readers/seviri_l1b_native.yaml
index d08c86b0e5..8fbecd4e59 100644
--- a/satpy/etc/readers/seviri_l1b_native.yaml
+++ b/satpy/etc/readers/seviri_l1b_native.yaml
@@ -1,9 +1,11 @@
reader:
name: seviri_l1b_native
short_name: SEVIRI L1b Native
- long_name: MSG SEVIRI Level 1b (Native)
+ long_name: MSG (Meteosat 8 to 11) SEVIRI data in native format
description: >
Reader for EUMETSAT MSG SEVIRI Level 1b native format files.
+ status: Nominal
+ supports_fsspec: false
sensors: [seviri]
default_channels: [HRV, IR_016, IR_039, IR_087, IR_097, IR_108, IR_120, IR_134, VIS006, VIS008, WV_062, WV_073]
reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
@@ -31,8 +33,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -47,8 +49,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -63,8 +65,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -79,8 +81,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -95,8 +97,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -111,8 +113,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -127,8 +129,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -143,8 +145,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -159,8 +161,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -175,8 +177,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -191,8 +193,8 @@ datasets:
standard_name: toa_brightness_temperature
units: "K"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -207,8 +209,8 @@ datasets:
standard_name: toa_brightness_temperature
units: "K"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
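The repeated `radiance` calibration change swaps the per-wavelength convention (W m-2 um-1 sr-1) for the per-wavenumber convention (mW m-2 sr-1 (cm-1)-1) used in EUMETSAT's SEVIRI calibration. The two spectral radiances relate via L_wn = lambda^2 * L_wl (from L_wn d(wn) = L_wl d(wl) with wn = 1/lambda), plus W-to-mW and um-1-to-cm-1 scale factors. A worked sketch of that conversion, for orientation only and not part of the reader:

```python
# Hedged sketch of the unit relationship behind this diff.
def per_um_to_per_cm(radiance_w_m2_um_sr: float, wavelength_um: float) -> float:
    """Convert W m-2 um-1 sr-1 into mW m-2 sr-1 (cm-1)-1 at one wavelength."""
    per_inv_um = radiance_w_m2_um_sr * wavelength_um ** 2  # W m-2 sr-1 (um-1)-1
    return per_inv_um * 1e3 / 1e4  # W -> mW, then (um-1)-1 -> (cm-1)-1

# Roughly 8.6 W m-2 um-1 sr-1 at 10.8 um comes out near 100 mW m-2 sr-1 (cm-1)-1.
print(per_um_to_per_cm(8.6, 10.8))
```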
diff --git a/satpy/etc/readers/seviri_l1b_nc.yaml b/satpy/etc/readers/seviri_l1b_nc.yaml
index 7d8753ef3d..1f2d9e7102 100644
--- a/satpy/etc/readers/seviri_l1b_nc.yaml
+++ b/satpy/etc/readers/seviri_l1b_nc.yaml
@@ -4,6 +4,8 @@ reader:
long_name: MSG SEVIRI Level 1b NetCDF4
description: >
NetCDF4 reader for EUMETSAT MSG SEVIRI Level 1b files.
+ status: Beta, HRV channel not supported
+ supports_fsspec: true
sensors: [seviri]
reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
group_keys: ["start_time", "satid"]
@@ -23,8 +25,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -40,8 +42,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -57,8 +59,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -74,8 +76,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -91,8 +93,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -108,8 +110,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -125,8 +127,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -142,8 +144,8 @@ datasets:
standard_name: toa_brightness_temperature
units: K
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -159,8 +161,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -177,8 +179,8 @@ datasets:
standard_name: toa_bidirectional_reflectance
units: "%"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -194,8 +196,8 @@ datasets:
standard_name: toa_brightness_temperature
units: "K"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
@@ -211,12 +213,10 @@ datasets:
standard_name: toa_brightness_temperature
units: "K"
radiance:
- standard_name: toa_outgoing_radiance_per_unit_wavelength
- units: W m-2 um-1 sr-1
+ standard_name: toa_outgoing_radiance_per_unit_wavenumber
+ units: mW m-2 sr-1 (cm-1)-1
counts:
standard_name: counts
units: count
file_type: seviri_l1b_nc
nc_key: 'ch6'
-
-
diff --git a/satpy/etc/readers/seviri_l2_bufr.yaml b/satpy/etc/readers/seviri_l2_bufr.yaml
index badda828dd..86a8a930be 100644
--- a/satpy/etc/readers/seviri_l2_bufr.yaml
+++ b/satpy/etc/readers/seviri_l2_bufr.yaml
@@ -1,9 +1,13 @@
reader:
- description: SEVIRI L2 BUFR Product Reader
name: seviri_l2_bufr
+  short_name: SEVIRI L2 BUFR
+ long_name: MSG (Meteosat 8 to 11) Level 2 products in BUFR format
+ description: SEVIRI L2 BUFR Product Reader
+ status: Alpha
+ supports_fsspec: false
sensors: [seviri]
default_channels: []
- reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
file_types:
seviri_l2_bufr_asr:
@@ -47,13 +51,20 @@ file_types:
- '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
- '{spacecraft:s}-SEVI-MSGTOZN-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
+ seviri_l2_bufr_amv:
+ file_reader: !!python/name:satpy.readers.seviri_l2_bufr.SeviriL2BufrFileHandler
+ file_patterns:
+ - 'AMVBUFRProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:s}_{satellite:s}_{mission:s}_{subsat:s}'
+ - '{spacecraft:s}-SEVI-MSGAMVE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}.bfr'
+ - '{spacecraft:s}-SEVI-MSGAMVE-{loc1:s}-{loc2:s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{time1:%Y%m%d%H%M%S}-{ord1:s}'
+
datasets:
latitude:
name: latitude
key: 'latitude'
- resolution: [48000,9000]
- file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz]
+ resolution: [48006.450653072,9001.209497451,72009.675979608]
+ file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz,seviri_l2_bufr_amv]
standard_name: latitude
units: degree_north
fill_value: -1.e+100
@@ -61,8 +72,8 @@ datasets:
longitude:
name: longitude
key: 'longitude'
- resolution: [48000,9000]
- file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz]
+ resolution: [48006.450653072,9001.209497451,72009.675979608]
+ file_type: [seviri_l2_bufr_asr,seviri_l2_bufr_cla,seviri_l2_bufr_csr,seviri_l2_bufr_gii,seviri_l2_bufr_thu,seviri_l2_bufr_toz,seviri_l2_bufr_amv]
standard_name: longitude
units: degree_east
fill_value: -1.e+100
@@ -71,7 +82,8 @@ datasets:
nir39all:
name: nir39all
key: '#19#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [ 3.48, 3.92, 4.36 ]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -83,7 +95,8 @@ datasets:
nir39clr:
name: nir39clr
key: '#20#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [ 3.48, 3.92, 4.36 ]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -95,7 +108,8 @@ datasets:
nir39cld:
name: nir39cld
key: '#21#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [ 3.48, 3.92, 4.36 ]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -107,7 +121,8 @@ datasets:
nir39low:
name: nir39low
key: '#22#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [ 3.48, 3.92, 4.36 ]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -119,7 +134,8 @@ datasets:
nir39med:
name: nir39med
key: '#23#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [ 3.48, 3.92, 4.36 ]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -131,7 +147,8 @@ datasets:
nir39high:
name: nir39high
key: '#24#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [ 3.48, 3.92, 4.36 ]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -143,7 +160,8 @@ datasets:
wv62all:
name: wv62all
key: '#25#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -155,7 +173,8 @@ datasets:
wv62clr:
name: wv62clr
key: '#26#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -167,7 +186,8 @@ datasets:
wv62cld:
name: wv62cld
key: '#27#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -179,7 +199,8 @@ datasets:
wv62low:
name: wv62low
key: '#28#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -191,7 +212,8 @@ datasets:
wv62med:
name: wv62med
key: '#29#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -203,7 +225,8 @@ datasets:
wv62high:
name: wv62high
key: '#30#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -215,7 +238,8 @@ datasets:
wv73all:
name: wv73all
key: '#31#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -227,7 +251,8 @@ datasets:
wv73clr:
name: wv73clr
key: '#32#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -239,7 +264,8 @@ datasets:
wv73cld:
name: wv73cld
key: '#33#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -251,7 +277,8 @@ datasets:
wv73low:
name: wv73low
key: '#34#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -263,7 +290,8 @@ datasets:
wv73med:
name: wv73med
key: '#35#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -275,7 +303,8 @@ datasets:
wv73high:
name: wv73high
key: '#36#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -287,7 +316,8 @@ datasets:
ir87all:
name: ir87all
key: '#37#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -299,7 +329,8 @@ datasets:
ir87clr:
name: ir87clr
key: '#38#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -311,7 +342,8 @@ datasets:
ir87cld:
name: ir87cld
key: '#39#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -323,7 +355,8 @@ datasets:
ir87low:
name: ir87low
key: '#40#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -335,7 +368,8 @@ datasets:
ir87med:
name: ir87med
key: '#41#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -347,7 +381,8 @@ datasets:
ir87high:
name: ir87high
key: '#42#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -359,7 +394,8 @@ datasets:
ir97all:
name: ir97all
key: '#43#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -371,7 +407,8 @@ datasets:
ir97clr:
name: ir97clr
key: '#44#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -383,7 +420,8 @@ datasets:
ir97cld:
name: ir97cld
key: '#45#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -395,7 +433,8 @@ datasets:
ir97low:
name: ir97low
key: '#46#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -407,7 +446,8 @@ datasets:
ir97med:
name: ir97med
key: '#47#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -419,7 +459,8 @@ datasets:
ir97high:
name: ir97high
key: '#48#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -431,7 +472,8 @@ datasets:
ir108all:
name: ir108all
key: '#49#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -443,7 +485,8 @@ datasets:
ir108clr:
name: ir108clr
key: '#50#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -455,7 +498,8 @@ datasets:
ir108cld:
name: ir108cld
key: '#51#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -467,7 +511,8 @@ datasets:
ir108low:
name: ir108low
key: '#52#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -479,7 +524,8 @@ datasets:
ir108med:
name: ir108med
key: '#53#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -491,7 +537,8 @@ datasets:
ir108high:
name: ir108high
key: '#54#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -503,7 +550,8 @@ datasets:
ir120all:
name: ir120all
key: '#55#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -515,7 +563,8 @@ datasets:
ir120clr:
name: ir120clr
key: '#56#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -527,7 +576,8 @@ datasets:
ir120cld:
name: ir120cld
key: '#57#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -539,7 +589,8 @@ datasets:
ir120low:
name: ir120low
key: '#58#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -551,7 +602,8 @@ datasets:
ir120med:
name: ir120med
key: '#59#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -563,7 +615,8 @@ datasets:
ir120high:
name: ir120high
key: '#60#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -575,7 +628,8 @@ datasets:
ir134all:
name: ir134all
key: '#61#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -587,7 +641,8 @@ datasets:
ir134clr:
name: ir134clr
key: '#62#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -599,7 +654,8 @@ datasets:
ir134cld:
name: ir134cld
key: '#63#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -611,7 +667,8 @@ datasets:
ir134low:
name: ir134low
key: '#64#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -623,7 +680,8 @@ datasets:
ir134med:
name: ir134med
key: '#65#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -635,7 +693,8 @@ datasets:
ir134high:
name: ir134high
key: '#66#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: K
file_type: seviri_l2_bufr_asr
@@ -647,7 +706,7 @@ datasets:
pcld:
name: pcld
key: '#1#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_asr
@@ -659,7 +718,7 @@ datasets:
pclr:
name: pclr
key: '#1#amountSegmentCloudFree'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: clear_sky_area_fraction
units: '%'
file_type: seviri_l2_bufr_asr
@@ -671,7 +730,7 @@ datasets:
pclrs:
name: pclrs
key: '#2#amountSegmentCloudFree'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: clear_sky_area_fraction
units: '%'
file_type: seviri_l2_bufr_asr
@@ -684,7 +743,7 @@ datasets:
hca:
name: hca
key: '#1#amountOfHighClouds'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_cla
@@ -696,7 +755,7 @@ datasets:
lca:
name: lca
key: '#1#amountOfLowClouds'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_cla
@@ -708,7 +767,7 @@ datasets:
mca:
name: mca
key: '#1#amountOfMiddleClouds'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_cla
@@ -720,7 +779,7 @@ datasets:
tca:
name: tca
key: '#1#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_cla
@@ -733,7 +792,8 @@ datasets:
nir39:
name: nir39
key: '#4#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [3.48, 3.92, 4.36]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -745,7 +805,8 @@ datasets:
cld39:
name: cld39
key: '#4#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [3.48, 3.92, 4.36]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -757,7 +818,8 @@ datasets:
wv62:
name: wv62
key: '#5#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -769,7 +831,8 @@ datasets:
cld62:
name: cld62
key: '#5#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [5.35, 6.25, 7.15]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -781,7 +844,8 @@ datasets:
wv73:
name: wv73
key: '#6#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -793,7 +857,8 @@ datasets:
cld73:
name: cld73
key: '#6#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [6.85, 7.35, 7.85]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -805,7 +870,8 @@ datasets:
ir87:
name: ir87
key: '#7#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -817,7 +883,8 @@ datasets:
cld87:
name: cld87
key: '#7#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [8.3, 8.7, 9.1]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -829,7 +896,8 @@ datasets:
ir97:
name: ir97
key: '#8#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -841,7 +909,8 @@ datasets:
cld97:
name: cld97
key: '#8#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.38, 9.66, 9.94]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -853,7 +922,8 @@ datasets:
ir108:
name: ir108
key: '#9#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -865,7 +935,8 @@ datasets:
cld108:
name: cld108
key: '#9#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [9.8, 10.8, 11.8]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -877,7 +948,8 @@ datasets:
ir120:
name: ir120
key: '#10#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -889,7 +961,8 @@ datasets:
cld120:
name: cld120
key: '#10#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [11.0, 12.0, 13.0]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -901,7 +974,8 @@ datasets:
ir134:
name: ir134
key: '#11#brightnessTemperature'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: toa_brightness_temperature
units: "W/sr-1/m-2"
file_type: seviri_l2_bufr_csr
@@ -913,7 +987,8 @@ datasets:
cld134:
name: cld134
key: '#11#cloudAmountInSegment'
- resolution: 48000
+ resolution: 48006.450653072
+ wavelength: [12.4, 13.4, 14.4]
standard_name: cloud_area_fraction
units: '%'
file_type: seviri_l2_bufr_csr
@@ -927,7 +1002,7 @@ datasets:
ki:
name: ki
key: '#1#kIndex'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: atmosphere_stability_k_index
coordinates:
- longitude
@@ -939,7 +1014,7 @@ datasets:
ko:
name: ko
key: '#1#koIndex'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: atmosphere_stability_ko_index
coordinates:
- longitude
@@ -951,7 +1026,7 @@ datasets:
li:
name: li
key: '#1#parcelLiftedIndexTo500Hpa'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: atmosphere_stability_lifted_index
coordinates:
- longitude
@@ -963,7 +1038,7 @@ datasets:
lpw1:
name: lpw1
key: '#2#precipitableWater'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: lwe_thickness_of_precipitation_amount
coordinates:
- longitude
@@ -975,7 +1050,7 @@ datasets:
lpw2:
name: lpw2
key: '#3#precipitableWater'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: lwe_thickness_of_precipitation_amount
coordinates:
- longitude
@@ -987,7 +1062,7 @@ datasets:
lpw3:
name: lpw3
key: '#4#precipitableWater'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: lwe_thickness_of_precipitation_amount
coordinates:
- longitude
@@ -999,7 +1074,7 @@ datasets:
mb:
name: mb
key: '#1#maximumBuoyancy'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: atmosphere_stability_maximum_buoyancy_index
coordinates:
- longitude
@@ -1011,7 +1086,7 @@ datasets:
stza:
name: stza
key: '#1#satelliteZenithAngle'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: sensor_zenith_angle
coordinates:
- longitude
@@ -1023,7 +1098,7 @@ datasets:
tpw:
name: tpw
key: '#1#precipitableWater'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: lwe_thickness_of_precipitation_amount
coordinates:
- longitude
@@ -1036,7 +1111,7 @@ datasets:
thu62:
name: thu62
key: '#1#relativeHumidity'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: relative_humidity
units: '%'
file_type: seviri_l2_bufr_thu
@@ -1048,7 +1123,7 @@ datasets:
thu73:
name: thu73
key: '#2#relativeHumidity'
- resolution: 48000
+ resolution: 48006.450653072
standard_name: relative_humidity
units: '%'
file_type: seviri_l2_bufr_thu
@@ -1061,7 +1136,7 @@ datasets:
toz:
name: toz
key: '#1#totalOzone'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: atmosphere_mass_content_of_ozone
units: dobson
file_type: seviri_l2_bufr_toz
@@ -1073,7 +1148,7 @@ datasets:
qual:
name: qual
key: '#1#totalOzone->totalOzoneQuality'
- resolution: 9000
+ resolution: 9001.209497451
standard_name: total_ozone_quality
units: ""
file_type: seviri_l2_bufr_toz
@@ -1081,3 +1156,40 @@ datasets:
- longitude
- latitude
fill_value: 0
+
+ # ---- AMV products ------------
+ speed:
+ name: speed
+ key: '#1#windSpeed'
+ resolution: 72009.675979608
+ file_type: seviri_l2_bufr_amv
+ standard_name: wind_speed
+ units: m s-1
+ fill_value: -1.e+100
+ coordinates:
+ - longitude
+ - latitude
+
+ direction:
+ name: direction
+ key: '#1#windDirection'
+ resolution: 72009.675979608
+ file_type: seviri_l2_bufr_amv
+ standard_name: wind_to_direction
+ units: deg
+ fill_value: -1.e+100
+ coordinates:
+ - longitude
+ - latitude
+
+ pressure:
+ name: pressure
+ key: '#1#pressure'
+ resolution: 72009.675979608
+ file_type: seviri_l2_bufr_amv
+ standard_name: wind_pressure
+ units: Pa
+ fill_value: -1.e+100
+ coordinates:
+ - longitude
+ - latitude
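The updated resolutions above are not arbitrary: each is an exact multiple of the nominal SEVIRI 3 km sub-satellite pixel spacing (3000.403165817 m), replacing the rounded 48000/9000 figures. A quick sketch of the arithmetic (the 16-, 3- and 24-pixel segment factors are inferred from the ratios, not taken from product documentation):

```python
# The seviri_l2_bufr resolutions equal the nominal SEVIRI pixel spacing
# multiplied by each product's segment size in pixels.
SEVIRI_PIXEL_M = 3000.403165817  # nominal 3 km sub-satellite pixel spacing

for product, n_pixels in [("ASR/CSR/THU", 16), ("GII/TOZ", 3), ("AMV", 24)]:
    print(f"{product}: {SEVIRI_PIXEL_M * n_pixels:.9f} m")
# ASR/CSR/THU: 48006.450653072 m
# GII/TOZ: 9001.209497451 m
# AMV: 72009.675979608 m
```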
diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml
index 3995f60fd8..f238d53122 100644
--- a/satpy/etc/readers/seviri_l2_grib.yaml
+++ b/satpy/etc/readers/seviri_l2_grib.yaml
@@ -1,10 +1,13 @@
reader:
name: seviri_l2_grib
short_name: SEVIRI L2 GRIB
- long_name: MSG SEVIRI L2 (GRIB)
+ long_name: MSG (Meteosat 8 to 11) Level 2 products in GRIB2 format
description: Reader for EUMETSAT MSG SEVIRI L2 files in GRIB format.
+ status: Nominal
+ supports_fsspec: false
sensors: [seviri]
- reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader
+
file_types:
# EUMETSAT MSG SEVIRI L2 Cloud Mask files in GRIB format
@@ -70,7 +73,7 @@ datasets:
cloud_mask:
name: cloud_mask
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_clm
parameter_number: 7
units: "1"
@@ -78,7 +81,7 @@ datasets:
pixel_scene_type:
name: pixel_scene_type
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 8
units: "1"
@@ -86,7 +89,7 @@ datasets:
measurement_cost:
name: measurement_cost
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 30
units: "1"
@@ -94,7 +97,7 @@ datasets:
upper_layer_cloud_optical_depth:
name: upper_layer_cloud_optical_depth
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 31
units: "1"
@@ -102,7 +105,7 @@ datasets:
upper_layer_cloud_top_pressure:
name: upper_layer_cloud_top_pressure
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 32
units: Pa
@@ -110,7 +113,7 @@ datasets:
upper_layer_cloud_effective_radius:
name: upper_layer_cloud_effective_radius
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 33
units: m
@@ -118,7 +121,7 @@ datasets:
error_in_upper_layer_cloud_optical_depth:
name: error_in_upper_layer_cloud_optical_depth
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 34
units: "1"
@@ -126,7 +129,7 @@ datasets:
error_in_upper_layer_cloud_top_pressure:
name: error_in_upper_layer_cloud_top_pressure
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 35
units: Pa
@@ -134,7 +137,7 @@ datasets:
error_in_upper_layer_cloud_effective_radius:
name: error_in_upper_layer_cloud_effective_radius
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 36
units: m
@@ -142,7 +145,7 @@ datasets:
lower_layer_cloud_optical_depth:
name: lower_layer_cloud_optical_depth
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 37
units: "1"
@@ -150,7 +153,7 @@ datasets:
lower_layer_cloud_top_pressure:
name: lower_layer_cloud_top_pressure
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 38
units: Pa
@@ -158,7 +161,7 @@ datasets:
error_in_lower_layer_cloud_optical_depth:
name: error_in_lower_layer_cloud_optical_depth
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 39
units: "1"
@@ -166,7 +169,7 @@ datasets:
error_in_lower_layer_cloud_top_pressure:
name: error_in_lower_layer_cloud_top_pressure
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_oca
parameter_number: 40
units: Pa
@@ -174,7 +177,7 @@ datasets:
fire_probability:
name: fire_probability
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_fir
parameter_number: 192
units: "%"
@@ -182,7 +185,7 @@ datasets:
active_fires:
name: active_fires
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_fir
parameter_number: 9
units: "1"
@@ -190,31 +193,31 @@ datasets:
aerosol_optical_thickness_vis06:
name: aerosol_optical_thickness_vis06
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_aes
parameter_number: 20
- units: "um"
+ units: "1"
long_name: aerosol_optical_thickness_vis06
aerosol_optical_thickness_vis08:
name: aerosol_optical_thickness_vis08
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_aes
parameter_number: 21
- units: "um"
+ units: "1"
long_name: aerosol_optical_thickness_vis08
aerosol_optical_thickness_vis16:
name: aerosol_optical_thickness_vis16
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_aes
parameter_number: 22
- units: "um"
- long_name: aerosol_optical_thickness_vis06
+ units: "1"
+ long_name: aerosol_optical_thickness_vis16
angstroem_coefficient:
name: angstroem_coefficient
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_aes
parameter_number: 23
units: "1"
@@ -222,7 +225,7 @@ datasets:
aes_quality:
name: aes_quality
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_aes
parameter_number: 192
units: "1"
@@ -230,15 +233,15 @@ datasets:
cloud_top_height:
name: cloud_top_height
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_cth
parameter_number: 2
- units: Pa
+ units: m
long_name: cloud_top_height
cloud_top_quality:
name: cloud_top_quality
- resolution: 3000
+ resolution: 9001.209497451
file_type: grib_seviri_cth
parameter_number: 3
units: "1"
@@ -246,7 +249,8 @@ datasets:
vis_refl_06:
name: vis_refl_06
- resolution: 3000
+ resolution: 3000.403165817
+ wavelength: [0.56, 0.635, 0.71]
file_type: grib_seviri_crm
parameter_number: 9
units: "%"
@@ -254,7 +258,8 @@ datasets:
vis_refl_08:
name: vis_refl_08
- resolution: 3000
+ resolution: 3000.403165817
+ wavelength: [0.74, 0.81, 0.88]
file_type: grib_seviri_crm
parameter_number: 10
units: "%"
@@ -262,7 +267,8 @@ datasets:
vis_refl_16:
name: vis_refl_16
- resolution: 3000
+ resolution: 3000.403165817
+ wavelength: [1.5, 1.64, 1.78]
file_type: grib_seviri_crm
parameter_number: 11
units: "%"
@@ -270,7 +276,8 @@ datasets:
nir_refl_39:
name: nir_refl_39
- resolution: 3000
+ resolution: 3000.403165817
+ wavelength: [3.48, 3.92, 4.36]
file_type: grib_seviri_crm
parameter_number: 12
units: "%"
@@ -278,23 +285,23 @@ datasets:
num_accumulations:
name: num_accumulations
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_crm
parameter_number: 6
units: "1"
long_name: num_accumulations
- azimuth_angle:
- name: azimuth_angle
- resolution: 3000
+ solar_zenith_angle:
+ name: solar_zenith_angle
+ resolution: 3000.403165817
file_type: grib_seviri_crm
parameter_number: 7
units: degrees
- long_name: azimuth_angle
+ long_name: solar_zenith_angle
relative_azimuth_angle:
name: relative_azimuth_angle
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_crm
parameter_number: 8
units: degrees
@@ -302,7 +309,7 @@ datasets:
instantaneous_rain_rate:
name: instantaneous_rain_rate
- resolution: 3000
+ resolution: 3000.403165817
file_type: grib_seviri_mpe
parameter_number: 1
units: "kg m-2 s-1"
diff --git a/satpy/etc/readers/slstr_l1b.yaml b/satpy/etc/readers/slstr_l1b.yaml
index 73af1da9b7..85c875cca2 100644
--- a/satpy/etc/readers/slstr_l1b.yaml
+++ b/satpy/etc/readers/slstr_l1b.yaml
@@ -1,6 +1,10 @@
reader:
- description: NC Reader for SLSTR data
name: slstr_l1b
+ short_name: SLSTR l1b
+ long_name: Sentinel-3 A and B SLSTR data in netCDF4 format
+ description: NC Reader for SLSTR data
+ status: Alpha
+ supports_fsspec: false
sensors: [slstr]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -89,6 +93,16 @@ datasets:
standard_name: latitude
units: degree
+ elevation:
+ name: elevation
+ resolution: [500, 1000]
+ view: [nadir, oblique]
+ stripe: [a, b, i, f]
+ file_type: esa_geo
+ file_key: elevation_{stripe:1s}{view:1s}
+ standard_name: elevation
+ units: m
+
# The channels S1-S3 are available in nadir (default) and oblique view.
S1:
name: S1
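The new elevation dataset is parameterised over stripe and view like the radiance channels; a sketch of loading it explicitly (paths are hypothetical, and the stripe/view query is only needed when the defaults are ambiguous):

```python
# Load the SLSTR surface elevation added above; file_key resolves to e.g.
# 'elevation_an' for the 500 m nadir a-stripe grid.
from glob import glob

from satpy import Scene
from satpy.dataset import DataQuery

scn = Scene(filenames=glob("S3A_SL_1_RBT____*.SEN3/*.nc"), reader="slstr_l1b")
scn.load([DataQuery(name="elevation", resolution=500, view="nadir", stripe="a")])
print(scn["elevation"].attrs["units"])  # 'm'
```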
diff --git a/satpy/etc/readers/slstr_l2.yaml b/satpy/etc/readers/slstr_l2.yaml
index 76850ec257..7924cb198a 100644
--- a/satpy/etc/readers/slstr_l2.yaml
+++ b/satpy/etc/readers/slstr_l2.yaml
@@ -1,6 +1,10 @@
reader:
- description: NC Reader for Sentinel-3 SLSTR Level 2 data
name: slstr_l2
+ short_name: SLSTR l2
+ long_name: Sentinel-3 SLSTR Level 2 data in netCDF format
+ description: NC Reader for Sentinel-3 SLSTR Level 2 data
+ status: Defunct
+ supports_fsspec: false
sensors: [slstr_l2]
default_channels: []
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/readers/smos_l2_wind.yaml b/satpy/etc/readers/smos_l2_wind.yaml
index cad092074e..d4cb0b8680 100644
--- a/satpy/etc/readers/smos_l2_wind.yaml
+++ b/satpy/etc/readers/smos_l2_wind.yaml
@@ -1,6 +1,10 @@
reader:
- description: SMOS Level 2 Wind NetCDF reader
name: smos_l2_wind
+ short_name: SMOS l2
+ long_name: SMOS level 2 wind data in NetCDF4 format
+ description: SMOS Level 2 Wind NetCDF reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [smos]
diff --git a/satpy/etc/readers/tropomi_l2.yaml b/satpy/etc/readers/tropomi_l2.yaml
index 7e164c5f82..3e961f7d56 100644
--- a/satpy/etc/readers/tropomi_l2.yaml
+++ b/satpy/etc/readers/tropomi_l2.yaml
@@ -1,6 +1,10 @@
reader:
- description: TROPOMI Level 2 NetCDF reader
name: tropomi_l2
+ short_name: TROPOMI l2
+ long_name: TROPOMI Level 2 data in NetCDF4 format
+ description: TROPOMI Level 2 NetCDF reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [tropomi]
@@ -42,16 +46,16 @@ datasets:
file_type: tropomi_l2
file_key: 'PRODUCT/SUPPORT_DATA/GEOLOCATIONS/longitude_bounds'
standard_name: assembled_longitude_bounds
- offset_time:
- name: 'offset_time'
+ delta_time:
+ name: 'delta_time'
file_type: tropomi_l2
file_key: 'PRODUCT/delta_time'
- standard_name: offset_time
- ref_time:
- name: 'ref_time'
+ standard_name: delta_time
+ time:
+ name: 'time'
file_type: tropomi_l2
file_key: 'PRODUCT/time'
- standard_name: ref_time
+ standard_name: time
tm5_constant_a:
name: 'tm5_constant_a'
file_type: tropomi_l2
@@ -67,4 +71,3 @@ datasets:
file_type: tropomi_l2
file_key: 'PRODUCT/time_utc'
standard_name: time_utc
-
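The renames above make the dataset names match the variables in the product files (PRODUCT/delta_time and PRODUCT/time), so only the wishlist changes in user code; a sketch with a made-up file name:

```python
# 'offset_time' is now 'delta_time' and 'ref_time' is now 'time'.
from satpy import Scene

scn = Scene(filenames=["S5P_OFFL_L2__NO2____20220101T112853_20220101T131023.nc"],
            reader="tropomi_l2")
scn.load(["delta_time", "time"])  # previously ["offset_time", "ref_time"]
```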
diff --git a/satpy/etc/readers/vaisala_gld360.yaml b/satpy/etc/readers/vaisala_gld360.yaml
index eb8a9b0eaf..6744d95c23 100644
--- a/satpy/etc/readers/vaisala_gld360.yaml
+++ b/satpy/etc/readers/vaisala_gld360.yaml
@@ -1,6 +1,10 @@
reader:
- description: Vaisala Global Lightning Dataset 360 reader
name: vaisala_gld360
+ short_name: Vaisala GLD360
+ long_name: Vaisala Global Lightning Dataset GLD360 data in ASCII format
+ description: Vaisala Global Lightning Dataset 360 reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [vaisala_gld360]
diff --git a/satpy/etc/readers/vii_l1b_nc.yaml b/satpy/etc/readers/vii_l1b_nc.yaml
index 993878580c..3d200341dd 100644
--- a/satpy/etc/readers/vii_l1b_nc.yaml
+++ b/satpy/etc/readers/vii_l1b_nc.yaml
@@ -1,9 +1,11 @@
reader:
name: vii_l1b_nc
short_name: VII L1B RAD NetCDF4
- long_name: EPS-SG VII L1B Radiance (NetCDF4)
+ long_name: EPS-SG Visual Infrared Imager (VII) Level 1B Radiance data in netCDF4 format
description: >
- Reader for EUMETSAT EPSG-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format.
+ Reader for EUMETSAT EPS-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format per FS V4A.
+ status: Beta
+ supports_fsspec: false
sensors: [vii]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
@@ -11,9 +13,7 @@ file_types:
# EUMETSAT EPSG-SG Visual Infrared Imager Level 1B Radiance files in NetCDF4 format
nc_vii_l1b_rad:
file_reader: !!python/name:satpy.readers.vii_l1b_nc.ViiL1bNCFileHandler
- file_patterns: ['W_DE-AIRBUSDS-Friedrichshafen,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc',
- 'W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc',
- 'W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
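Note that consolidating the patterns also fixes the casing of the producer field: pattern matching is case-sensitive, so files named with the 'W_XX-EUMETSAT-Darmstadt…EUMT' convention would not have matched the old lower-case variants. A sketch of how such a pattern becomes a shell glob (trollsift is the pattern engine behind satpy's file_patterns):

```python
# Inspect the glob satpy derives from the kept vii_l1b_nc file pattern.
from trollsift import globify

pattern = (
    "W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-1B-RAD_C_EUMT_"
    "{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_"
    "{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_"
    "{disposition_mode:s}_{processing_mode:s}____.nc"
)
print(globify(pattern))  # usable directly with glob.glob()
```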
@@ -51,7 +51,7 @@ datasets:
name: vii_443
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_443
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -65,7 +65,7 @@ datasets:
name: vii_555
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_555
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -79,7 +79,7 @@ datasets:
name: vii_668
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_668
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -93,7 +93,7 @@ datasets:
name: vii_752
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_752
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -107,7 +107,7 @@ datasets:
name: vii_763
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_763
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration: [reflectance, radiance]
chan_solar_index: 4
wavelength: [0.75695, 0.7627, 0.76845]
@@ -116,7 +116,7 @@ datasets:
name: vii_865
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_865
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -130,7 +130,7 @@ datasets:
name: vii_914
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_914
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -144,7 +144,7 @@ datasets:
name: vii_1240
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_1240
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -158,7 +158,7 @@ datasets:
name: vii_1375
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_1375
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -172,7 +172,7 @@ datasets:
name: vii_1630
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_1630
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -186,7 +186,7 @@ datasets:
name: vii_2250
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_2250
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
reflectance:
standard_name: toa_bidirectional_reflectance
@@ -200,7 +200,7 @@ datasets:
name: vii_3740
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_3740
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -214,7 +214,7 @@ datasets:
name: vii_3959
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_3959
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -228,7 +228,7 @@ datasets:
name: vii_4050
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_4050
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -242,7 +242,7 @@ datasets:
name: vii_6725
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_6725
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -256,7 +256,7 @@ datasets:
name: vii_7325
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_7325
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -270,7 +270,7 @@ datasets:
name: vii_8540
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_8540
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -284,7 +284,7 @@ datasets:
name: vii_10690
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_10690
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -298,7 +298,7 @@ datasets:
name: vii_12020
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_12020
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -312,7 +312,7 @@ datasets:
name: vii_13345
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/vii_13345
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
calibration:
brightness_temperature:
standard_name: toa_brightness_temperature
@@ -329,28 +329,28 @@ datasets:
standard_name: solar_zenith_angle
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/solar_zenith
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
solar_azimuth_tie_points:
name: solar_azimuth_tie_points
standard_name: solar_azimuth_angle
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/solar_azimuth
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
observation_zenith_tie_points:
name: observation_zenith_tie_points
standard_name: sensor_zenith_angle
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/observation_zenith
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
observation_azimuth_tie_points:
name: observation_azimuth_tie_points
standard_name: sensor_azimuth_angle
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/observation_azimuth
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
solar_zenith:
name: solar_zenith
@@ -358,7 +358,7 @@ datasets:
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/solar_zenith
interpolate: True
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
solar_azimuth:
name: solar_azimuth
@@ -366,7 +366,7 @@ datasets:
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/solar_azimuth
interpolate: True
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
observation_zenith:
name: observation_zenith
@@ -374,7 +374,7 @@ datasets:
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/observation_zenith
interpolate: True
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
observation_azimuth:
name: observation_azimuth
@@ -382,19 +382,19 @@ datasets:
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/observation_azimuth
interpolate: True
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
# --- Orthorectification data ---
delta_lat_N_dem:
name: delta_lat_N_dem
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/delta_lat_N_dem
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: parallax_delta_latitude
delta_lon_N_dem:
name: delta_lon_N_dem
file_type: nc_vii_l1b_rad
file_key: data/measurement_data/delta_lon_N_dem
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: parallax_delta_longitude
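All VII coordinate lists flip from (lon, lat) to (lat, lon) order, which appears to align the declared coordinates with the row/column order of the data arrays; either way the change is transparent to user code, which keeps requesting datasets by name. A sketch with a truncated, hypothetical file name:

```python
# Loading VII L1B channels is unchanged by the coordinate reordering.
from satpy import Scene

scn = Scene(
    filenames=["W_XX-EUMETSAT-Darmstadt,SAT,SGA1-VII-1B-RAD_C_EUMT_20220101120000_...nc"],
    reader="vii_l1b_nc",  # the file name above is an illustrative stub
)
scn.load(["vii_668"], calibration="reflectance")
```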
diff --git a/satpy/etc/readers/vii_l2_nc.yaml b/satpy/etc/readers/vii_l2_nc.yaml
index e1b146eeb9..340f1598aa 100644
--- a/satpy/etc/readers/vii_l2_nc.yaml
+++ b/satpy/etc/readers/vii_l2_nc.yaml
@@ -1,17 +1,20 @@
reader:
name: vii_l2_nc
short_name: VII L2 NetCDF4
- long_name: EPS-SG VII L2 (NetCDF4)
+ long_name: EPS-SG Visual Infrared Imager (VII) Level 2 data in netCDF4 format
description: >
Reader for EUMETSAT EPSG-SG Visual Infrared Imager Level 2 files in NetCDF4 format.
+ status: Beta
+ supports_fsspec: false
sensors: [vii]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
file_types:
# EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Mask files in NetCDF4 format
nc_vii_l2_cld:
file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
- file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-CLD_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-CLD_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
orthorect: False
@@ -19,28 +22,28 @@ file_types:
# EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Top Pressure (using the Oxygen-A Band) files in NetCDF4 format
nc_vii_l2_ctp:
file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
- file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-CTP_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-CTP_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
# EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Cloud Mask and First Guess Cloud Properties files in NetCDF4 format
nc_vii_l2_icm:
file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
- file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-ICM_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-ICM_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
# EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Optimal Cloud Analysis files in NetCDF4 format
nc_vii_l2_oca:
file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
- file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-OCA_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-OCA_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
# EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII visible/near-infrared) files in NetCDF4 format
nc_vii_l2_wvv:
file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
- file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-WVV_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-WVV_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
interpolate: False
@@ -49,13 +52,13 @@ file_types:
# EUMETSAT EPSG-SG Visual Infrared Imager Level 2 Total Precipitable Water (from VII thermal infra-red) files in NetCDF4 format
nc_vii_l2_wvi:
file_reader: !!python/name:satpy.readers.vii_l2_nc.ViiL2NCFileHandler
- file_patterns: ['W_xx-eumetsat-darmstadt,SAT,{spacecraft_name:s}-VII-02-WVI_C_EUM_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
+ file_patterns: ['W_XX-EUMETSAT-Darmstadt,SAT,{spacecraft_name:s}-VII-02-WVI_C_EUMT_{creation_time:%Y%m%d%H%M%S}_{mission_type:s}_{environment:s}_{sensing_start_time:%Y%m%d%H%M%S}_{sensing_end_time:%Y%m%d%H%M%S}_{disposition_mode:s}_{processing_mode:s}____.nc']
cached_longitude: data/measurement_data/longitude
cached_latitude: data/measurement_data/latitude
interpolate: False
orthorect: False
datasets:
# --- Coordinates ---
# TODO Coordinates on tie points are kept for test purposes
@@ -116,210 +119,210 @@ datasets:
name: cs_confidence
file_type: [nc_vii_l2_cld, nc_vii_l2_icm]
file_key: data/measurement_data/cs_confidence
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_area_fraction
flag_cm:
name: flag_cm
file_type: [nc_vii_l2_cld, nc_vii_l2_icm]
file_key: data/measurement_data/flag_cm
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_mask_classification
surface_type:
name: surface_type
file_type: [nc_vii_l2_cld, nc_vii_l2_icm]
file_key: data/measurement_data/surface_type
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: surface_type
ctp_o2:
name: ctp_o2
file_type: nc_vii_l2_ctp
file_key: data/measurement_data/ctp_o2
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: air_pressure_at_cloud_top
log10_ctp_o2_err:
name: log10_ctp_o2_err
file_type: nc_vii_l2_ctp
file_key: data/measurement_data/log10_ctp_o2_err
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: air_pressure_at_cloud_top
log10_cot_o2:
name: log10_cot_o2
file_type: nc_vii_l2_ctp
file_key: data/measurement_data/log10_cot_o2
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_optical_depth
log10_cot_o2_err:
name: log10_cot_o2_err
file_type: nc_vii_l2_ctp
file_key: data/measurement_data/log10_cot_o2_err
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_optical_depth
vii_ch_sel1:
name: vii_ch_sel1
file_type: nc_vii_l2_icm
file_key: data/measurement_data/vii_ch_sel1
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: toa_outgoing_radiance_per_unit_wavelength
vii_ch_sel2:
name: vii_ch_sel2
file_type: nc_vii_l2_icm
file_key: data/measurement_data/vii_ch_sel2
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: toa_outgoing_radiance_per_unit_wavelength
vii_ch_sel3:
name: vii_ch_sel3
file_type: nc_vii_l2_icm
file_key: data/measurement_data/vii_ch_sel3
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: toa_outgoing_radiance_per_unit_wavelength
flag_cph:
name: flag_cph
file_type: nc_vii_l2_icm
file_key: data/measurement_data/flag_cph
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: thermodynamic_phase_of_cloud_water_particles_at_cloud_top
log10_cot_fg:
name: log10_cot_fg
file_type: nc_vii_l2_icm
file_key: data/measurement_data/log10_cot_fg
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_optical_depth
log10_err_cot_fg:
name: log10_err_cot_fg
file_type: nc_vii_l2_icm
file_key: data/measurement_data/log10_err_cot_fg
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_optical_depth
cth_fg:
name: cth_fg
file_type: nc_vii_l2_icm
file_key: data/measurement_data/cth_fg
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: height_at_cloud_top
err_cth_fg:
name: err_cth_fg
file_type: nc_vii_l2_icm
file_key: data/measurement_data/err_cth_fg
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: height_at_cloud_top
moca_model_final:
name: moca_model_final
file_type: nc_vii_l2_oca
file_key: data/measurement_data/moca_model_final
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: scene_classification
log10_cot:
name: log10_cot
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_cot
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_optical_depth
log10_err_cot:
name: log10_err_cot
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_err_cot
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_optical_depth
cre:
name: cre
file_type: nc_vii_l2_oca
file_key: data/measurement_data/cre
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top
log10_err_cre:
name: log10_err_cre
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_err_cre
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top
ctp:
name: ctp
file_type: nc_vii_l2_oca
file_key: data/measurement_data/ctp
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: air_pressure_at_cloud_top
log10_err_ctp:
name: log10_err_ctp
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_err_ctp
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: air_pressure_at_cloud_top
ctt:
name: ctt
file_type: nc_vii_l2_oca
file_key: data/measurement_data/ctt
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: air_temperature_at_cloud_top
log10_cot2:
name: log10_cot2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_cot2
- coordinates: [lon_pixels2, lat_pixels2]
+ coordinates: [lat_pixels2, lon_pixels2]
standard_name: cloud_optical_depth
log10_err_cot2:
name: log10_err_cot2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_err_cot2
- coordinates: [lon_pixels2, lat_pixels2]
+ coordinates: [lat_pixels2, lon_pixels2]
standard_name: cloud_optical_depth
ctp2:
name: ctp2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/ctp2
- coordinates: [lon_pixels2, lat_pixels2]
+ coordinates: [lat_pixels2, lon_pixels2]
standard_name: air_pressure_at_cloud_top
log10_err_ctp2:
name: log10_err_ctp2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/log10_err_ctp2
- coordinates: [lon_pixels2, lat_pixels2]
+ coordinates: [lat_pixels2, lon_pixels2]
standard_name: air_pressure_at_cloud_top
ctt2:
name: ctt2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/ctt2
- coordinates: [lon_pixels2, lat_pixels2]
+ coordinates: [lat_pixels2, lon_pixels2]
standard_name: air_temperature_at_cloud_top
tpw:
name: tpw
file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/measurement_data/tpw
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: mass_of_water_in_air
tpw_err:
name: tpw_err
file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/measurement_data/tpw_err
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: mass_of_water_in_air
# --- Geometric data ---
@@ -329,28 +332,28 @@ datasets:
standard_name: solar_zenith_angle
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
file_key: data/measurement_data/solar_zenith
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
solar_azimuth_tie_points:
name: solar_azimuth_tie_points
standard_name: solar_azimuth_angle
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
file_key: data/measurement_data/solar_azimuth
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
observation_zenith_tie_points:
name: observation_zenith_tie_points
standard_name: sensor_zenith_angle
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
file_key: data/measurement_data/observation_zenith
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
observation_azimuth_tie_points:
name: observation_azimuth_tie_points
standard_name: sensor_azimuth_angle
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
file_key: data/measurement_data/observation_azimuth
- coordinates: [lon_tie_points, lat_tie_points]
+ coordinates: [lat_tie_points, lon_tie_points]
solar_zenith:
name: solar_zenith
@@ -358,7 +361,7 @@ datasets:
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/measurement_data/solar_zenith
interpolate: True
- coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect]
+ coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
solar_azimuth:
name: solar_azimuth
@@ -366,7 +369,7 @@ datasets:
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/measurement_data/solar_azimuth
interpolate: True
- coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect]
+ coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
observation_zenith:
name: observation_zenith
@@ -374,7 +377,7 @@ datasets:
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/measurement_data/observation_zenith
interpolate: True
- coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect]
+ coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
observation_azimuth:
name: observation_azimuth
@@ -382,35 +385,35 @@ datasets:
file_type: [nc_vii_l2_cld, nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/measurement_data/observation_azimuth
interpolate: True
- coordinates: [lon_pixels_no_orthorect, lat_pixels_no_orthorect]
+ coordinates: [lat_pixels_no_orthorect, lon_pixels_no_orthorect]
# --- Orthorectification data ---
delta_lat:
name: delta_lat
file_type: [nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
file_key: data/measurement_data/delta_lat
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: parallax_delta_latitude
delta_lon:
name: delta_lon
file_type: [nc_vii_l2_ctp, nc_vii_l2_icm, nc_vii_l2_oca]
file_key: data/measurement_data/delta_lon
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: parallax_delta_longitude
delta_lat_cloud2:
name: delta_lat_cloud2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/delta_lat_cloud2
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: parallax_delta_latitude
delta_lon_cloud2:
name: delta_lon_cloud2
file_type: nc_vii_l2_oca
file_key: data/measurement_data/delta_lon_cloud2
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: parallax_delta_longitude
# --- Quality Information data ---
@@ -418,19 +421,19 @@ datasets:
name: log10_j
file_type: [nc_vii_l2_ctp, nc_vii_l2_oca, nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/quality_information/log10_j
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cost_function
flag_ml:
name: flag_ml
file_type: nc_vii_l2_ctp
file_key: data/quality_information/flag_ml
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: cloud_multilayer_classification
qi_forecast:
name: qi_forecast
file_type: [nc_vii_l2_wvi, nc_vii_l2_wvv]
file_key: data/quality_information/qi_forecast
- coordinates: [lon_pixels, lat_pixels]
+ coordinates: [lat_pixels, lon_pixels]
standard_name: mass_of_water_in_air
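Given the indentation and flow-sequence repairs in this file, a quick load check is cheap insurance; a sketch assuming PyYAML and an importable satpy (the !!python/name tags need the unsafe loader to resolve):

```python
# Validate vii_l2_nc.yaml: top-level keys at column 0, commas in flow lists.
import yaml

with open("satpy/etc/readers/vii_l2_nc.yaml") as fh:
    cfg = yaml.load(fh, Loader=yaml.UnsafeLoader)  # resolves !!python/name tags

assert {"reader", "file_types", "datasets"} <= cfg.keys()
assert cfg["datasets"]["tpw"]["file_type"] == ["nc_vii_l2_wvi", "nc_vii_l2_wvv"]
```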
diff --git a/satpy/etc/readers/viirs_compact.yaml b/satpy/etc/readers/viirs_compact.yaml
index c0305fa987..31f4201930 100644
--- a/satpy/etc/readers/viirs_compact.yaml
+++ b/satpy/etc/readers/viirs_compact.yaml
@@ -1,6 +1,10 @@
reader:
- description: Generic Eumetsat Compact VIIRS Reader
name: viirs_compact
+ short_name: VIIRS Compact
+ long_name: SNPP VIIRS SDR data in HDF5 Compact format
+ description: Generic Eumetsat Compact VIIRS Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [viirs]
default_datasets:
diff --git a/satpy/etc/readers/viirs_edr_active_fires.yaml b/satpy/etc/readers/viirs_edr_active_fires.yaml
index 94dbd95f07..7c497f8d4c 100644
--- a/satpy/etc/readers/viirs_edr_active_fires.yaml
+++ b/satpy/etc/readers/viirs_edr_active_fires.yaml
@@ -1,13 +1,17 @@
reader:
- description: VIIRS Active Fires Reader
name: viirs_edr_active_fires
+ short_name: VIIRS active fires
+ long_name: VIIRS EDR Active Fires data in netCDF4 & CSV .txt format
+ description: VIIRS Active Fires Reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [viirs]
file_types:
fires_netcdf_img:
file_reader: !!python/name:satpy.readers.viirs_edr_active_fires.VIIRSActiveFiresFileHandler
- variable_prefix: ""
+ variable_prefix: "Fire Pixels/"
file_patterns:
- 'AFIMG_{satellite_name}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time}_{source}.nc'
fires_netcdf:
@@ -81,4 +85,4 @@ datasets:
file_key: "{variable_prefix}FP_T4"
coordinates: [longitude, latitude]
standard_name: toa_brightness_temperature
- units: 'K'
\ No newline at end of file
+ units: 'K'
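The new variable_prefix reflects that the imagery-resolution (AFIMG) NetCDF files keep their variables inside a 'Fire Pixels' group, while an empty prefix leaves the bare names; the dataset file_key templates expand accordingly:

```python
# How the '{variable_prefix}FP_T4' file_key template resolves.
template = "{variable_prefix}FP_T4"
print(template.format(variable_prefix="Fire Pixels/"))  # -> 'Fire Pixels/FP_T4'
print(template.format(variable_prefix=""))              # -> 'FP_T4' (empty prefix)
```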
diff --git a/satpy/etc/readers/viirs_edr_flood.yaml b/satpy/etc/readers/viirs_edr_flood.yaml
index dedaa7cdf6..c0cfdb69fb 100644
--- a/satpy/etc/readers/viirs_edr_flood.yaml
+++ b/satpy/etc/readers/viirs_edr_flood.yaml
@@ -1,6 +1,10 @@
reader:
- description: VIIRS flood HDF4 reader
name: viirs_edr_flood
+ short_name: VIIRS flood
+ long_name: VIIRS EDR Flood data in HDF4 format
+ description: VIIRS flood HDF4 reader
+ status: Beta
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [viirs]
diff --git a/satpy/etc/readers/viirs_l1b.yaml b/satpy/etc/readers/viirs_l1b.yaml
index a47d00d210..f078c4247d 100644
--- a/satpy/etc/readers/viirs_l1b.yaml
+++ b/satpy/etc/readers/viirs_l1b.yaml
@@ -1,6 +1,10 @@
reader:
- description: Generic NASA VIIRS L1B Reader
name: viirs_l1b
+ short_name: VIIRS l1b
+ long_name: SNPP VIIRS Level 1b data in netCDF4 format
+ description: Generic NASA VIIRS L1B Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [viirs]
default_datasets:
@@ -35,36 +39,42 @@ file_types:
- 'VGEOI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
- 'V{platform_shortname:2s}03IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
- 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
+ - 'V{platform_shortname:2s}03IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
vgeom:
file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
file_patterns:
- 'VGEOM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
- 'V{platform_shortname:2s}03MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
- 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
+ - 'V{platform_shortname:2s}03MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
vgeod:
file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
file_patterns:
- 'VGEOD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
- 'V{platform_shortname:2s}03DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
- 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
+ - 'V{platform_shortname:2s}03DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
vl1bi:
file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
file_patterns:
- 'VL1BI_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
- 'V{platform_shortname:2s}02IMG.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
- 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
+ - 'V{platform_shortname:2s}02IMG_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
vl1bm:
file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
file_patterns:
- 'VL1BM_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
- 'V{platform_shortname:2s}02MOD.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
- 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
+ - 'V{platform_shortname:2s}02MOD_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
vl1bd:
file_reader: !!python/name:satpy.readers.viirs_l1b.VIIRSL1BFileHandler
file_patterns:
- 'VL1BD_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S}_c{creation_time:%Y%m%d%H%M%S}.nc'
- 'V{platform_shortname:2s}02DNB.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}{creator}.nc'
- 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.nc'
+ - 'V{platform_shortname:2s}02DNB_NRT.A{start_time:%Y%j.%H%M}.{collection_number:3d}.{creation_time:%Y%j%H%M%S}.nc'
datasets:
i_lon:
@@ -175,7 +185,7 @@ datasets:
coordinates: [i_lon, i_lat]
file_type: vl1bi
I_SOLZ:
- name: i_solar_zenith_angle
+ name: solar_zenith_angle
standard_name: solar_zenith_angle
resolution: 371
units: degrees
@@ -183,7 +193,7 @@ datasets:
file_type: vgeoi
file_key: geolocation_data/solar_zenith
I_SOLA:
- name: i_solar_azimuth_angle
+ name: solar_azimuth_angle
standard_name: solar_azimuth_angle
resolution: 371
units: degrees
@@ -191,7 +201,7 @@ datasets:
file_type: vgeoi
file_key: geolocation_data/solar_azimuth
I_SENZ:
- name: i_satellite_zenith_angle
+ name: satellite_zenith_angle
standard_name: sensor_zenith_angle
resolution: 371
units: degrees
@@ -199,7 +209,7 @@ datasets:
file_type: vgeoi
file_key: geolocation_data/sensor_zenith
I_SENA:
- name: i_satellite_azimuth_angle
+ name: satellite_azimuth_angle
standard_name: sensor_azimuth_angle
resolution: 371
units: degrees
diff --git a/satpy/etc/readers/viirs_sdr.yaml b/satpy/etc/readers/viirs_sdr.yaml
index b3886b0198..e85c7f4f70 100644
--- a/satpy/etc/readers/viirs_sdr.yaml
+++ b/satpy/etc/readers/viirs_sdr.yaml
@@ -1,6 +1,10 @@
reader:
name: viirs_sdr
+ short_name: VIIRS SDR
+ long_name: SNPP VIIRS data in HDF5 SDR format
description: VIIRS SDR Reader
+ status: Nominal
+ supports_fsspec: false
reader: !!python/name:satpy.readers.viirs_sdr.VIIRSSDRReader
sensors: [viirs]
# file pattern keys to sort files by with 'satpy.utils.group_files'
diff --git a/satpy/etc/readers/virr_l1b.yaml b/satpy/etc/readers/virr_l1b.yaml
index 515391a87b..2178920fa6 100644
--- a/satpy/etc/readers/virr_l1b.yaml
+++ b/satpy/etc/readers/virr_l1b.yaml
@@ -1,6 +1,10 @@
reader:
- description: reader for VIRR data
name: virr_l1b
+ short_name: VIRR
+ long_name: VIRR data in HDF5 format
+ description: reader for VIRR data
+ status: Beta
+ supports_fsspec: false
sensors: [virr]
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
diff --git a/satpy/etc/writers/awips_tiled.yaml b/satpy/etc/writers/awips_tiled.yaml
index 90afd30c08..e761414904 100644
--- a/satpy/etc/writers/awips_tiled.yaml
+++ b/satpy/etc/writers/awips_tiled.yaml
@@ -66,23 +66,23 @@ templates:
# value: "${ORGANIZATION}"
awips_id: {}
# value: "{awips_id}" # special variable created by awips_tiled.py
- creating_entity:
+ physical_element: {}
+#      value: "{physical_element}" # special variable created by awips_tiled.py
+ satellite_id:
value: "{platform_name!u}-{sensor!u}"
- sector_id: {} # special handler in awips_tiled.py
+
coordinates:
x:
attributes:
- units: {}
- standard_name:
- value: '{standard_name}'
+ units:
+ value: "{units}"
encoding:
dtype: "int16"
_Unsigned: "true"
y:
attributes:
- units: {}
- standard_name:
- value: '{standard_name}'
+ units:
+ value: "{units}"
encoding:
dtype: "int16"
_Unsigned: "true"
@@ -111,27 +111,36 @@ templates:
attributes:
physical_element:
raw_value: ACSPO SST
+ units: {}
# CLAVR-x Products
default_clavrx:
reader: clavrx
var_name: data
attributes:
+ units: {}
physical_element:
- raw_value: 'CLAVR-x {name}'
+ value: 'CLAVR-x {name}'
clavrx_cloud_type:
reader: clavrx
name: cloud_type
var_name: data
attributes:
- attributes:
- physical_element:
- raw_value: CLAVR-x Cloud Type
+ physical_element:
+ raw_value: CLAVR-x Cloud Type
+ units: {}
+ encoding:
+ dtype: int16
+ _Unsigned: "true"
+ scale_factor: 0.5
+ add_offset: 0.0
+ _FillValue: -128
clavrx_cld_temp_acha:
reader: clavrx
name: cld_temp_acha
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Top Temperature (ACHA)
clavrx_cld_height_acha:
@@ -139,6 +148,7 @@ templates:
name: cld_height_acha
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Top Height (ACHA)
clavrx_cloud_phase:
@@ -146,13 +156,21 @@ templates:
name: cloud_phase
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Phase
+ encoding:
+ dtype: int16
+ _Unsigned: "true"
+ scale_factor: 0.5
+ add_offset: 0.0
+ _FillValue: -128
clavrx_cld_opd_dcomp:
reader: clavrx
name: cld_opd_dcomp
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Optical Depth (dcomp)
clavrx_clld_opd_nlcomp:
@@ -160,6 +178,7 @@ templates:
name: cloud_opd_nlcomp
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Optical Depth (nlcomp)
clavrx_cld_reff_dcomp:
@@ -167,6 +186,7 @@ templates:
name: cld_reff_dcomp
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Effective Radius (dcomp)
clavrx_cld_reff_nlcomp:
@@ -174,6 +194,7 @@ templates:
name: cld_reff_nlcomp
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Effective Radius (nlcomp)
clavrx_cld_emiss_acha:
@@ -181,6 +202,7 @@ templates:
name: cld_emiss_acha
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Emissivity (ACHA)
clavrx_refl_lunar_dnb_nom:
@@ -188,6 +210,7 @@ templates:
name: refl_lunar_dnb_nom
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Cloud Lunar Reflectance
clavrx_rain_rate:
@@ -195,6 +218,7 @@ templates:
name: rain_rate
var_name: data
attributes:
+ units: {}
physical_element:
raw_value: CLAVR-x Rain Rate
@@ -205,36 +229,42 @@ templates:
attributes:
physical_element:
raw_value: 0.63 um
+ units: {}
avhrr_band2_vis:
name: band2_vis
var_name: data
attributes:
physical_element:
raw_value: 0.86 um
+ units: {}
avhrr_band3a_vis:
name: band3a_vis
var_name: data
attributes:
physical_element:
raw_value: 1.61 um
+ units: {}
avhrr_band3b_bt:
name: band3b_bt
var_name: data
attributes:
physical_element:
raw_value: 3.74 um
+ units: {}
avhrr_band4_bt:
name: band4_bt
var_name: data
attributes:
physical_element:
raw_value: 10.8 um
+ units: {}
avhrr_band5_bt:
name: band5_bt
var_name: data
attributes:
physical_element:
raw_value: 12.0 um
+ units: {}
# VIIRS SDRs
viirs_i01:
@@ -243,490 +273,182 @@ templates:
attributes:
physical_element:
raw_value: 0.64 um
+ units: {}
viirs_i02:
name: I02
var_name: data
attributes:
physical_element:
- raw_value: 0.86 um
+ raw_value: 0.87 um
+ units: {}
viirs_i03:
name: I03
var_name: data
attributes:
physical_element:
raw_value: 1.61 um
+ units: {}
viirs_i04:
name: I04
var_name: data
attributes:
physical_element:
raw_value: 3.74 um
+ units: {}
viirs_i05:
name: I05
var_name: data
attributes:
physical_element:
raw_value: 11.5 um
+ units: {}
viirs_histogram_dnb:
name: histogram_dnb
var_name: data
attributes:
physical_element:
raw_value: Histogram DNB
+ units: {}
viirs_adaptive_dnb:
name: adaptive_dnb
var_name: data
attributes:
physical_element:
raw_value: Adaptive DNB
+ units: {}
viirs_dynamic_dnb:
name: dynamic_dnb
var_name: data
attributes:
physical_element:
raw_value: Dynamic DNB
+ units: {}
viirs_hncc_dnb:
name: hncc_dnb
var_name: data
attributes:
physical_element:
raw_value: HNCC DNB
+ units: {}
viirs_ifog:
name: ssec_fog
var_name: data
attributes:
physical_element:
raw_value: Fog
+ units: {}
viirs_m01:
name: M01
var_name: data
attributes:
physical_element:
raw_value: 0.41 um
+ units: {}
viirs_m02:
name: M02
var_name: data
attributes:
physical_element:
raw_value: 0.45 um
+ units: {}
viirs_m03:
name: M03
var_name: data
attributes:
physical_element:
raw_value: 0.49 um
+ units: {}
viirs_m04:
name: M04
var_name: data
attributes:
physical_element:
raw_value: 0.56 um
+ units: {}
viirs_m05:
name: M05
var_name: data
attributes:
physical_element:
raw_value: 0.67 um
+ units: {}
viirs_m06:
name: M06
var_name: data
attributes:
physical_element:
raw_value: 0.75 um
+ units: {}
viirs_m07:
name: M07
var_name: data
attributes:
physical_element:
raw_value: 0.86 um
+ units: {}
viirs_m08:
name: M08
var_name: data
attributes:
physical_element:
raw_value: 1.24 um
+ units: {}
viirs_m09:
name: M09
var_name: data
attributes:
physical_element:
raw_value: 1.38 um
+ units: {}
viirs_m10:
name: M10
var_name: data
attributes:
physical_element:
raw_value: 1.61 um
+ units: {}
viirs_m11:
name: M11
var_name: data
attributes:
physical_element:
raw_value: 2.25 um
+ units: {}
viirs_m12:
name: M12
var_name: data
attributes:
physical_element:
raw_value: 3.70 um
+ units: {}
viirs_m13:
name: M13
var_name: data
attributes:
physical_element:
raw_value: 4.05 um
+ units: {}
viirs_m14:
name: M14
var_name: data
attributes:
physical_element:
raw_value: 8.6 um
+ units: {}
viirs_m15:
name: M15
var_name: data
attributes:
physical_element:
raw_value: 10.8 um
+ units: {}
viirs_m16:
name: M16
var_name: data
attributes:
physical_element:
raw_value: 12.0 um
-
- # VIIRS Corrected Reflectance
- # viirs_viirs_crefl01:
- # name: viirs_crefl01
- # attributes:
-# physical_element:
-# raw_value: 0.67 um CR
- # viirs_viirs_crefl02:
- # name: viirs_crefl02
- # attributes:
-# physical_element:
-# raw_value: 0.87 um CR
- # viirs_viirs_crefl03:
- # name: viirs_crefl03
- # attributes:
-# physical_element:
-# raw_value: 0.49 um CR
- # viirs_viirs_crefl04:
- # name: viirs_crefl04
- # attributes:
-# physical_element:
-# raw_value: 0.56 um CR
- # viirs_viirs_crefl05:
- # name: viirs_crefl05
- # attributes:
-# physical_element:
-# raw_value: 1.24 um CR
- # viirs_viirs_crefl06:
- # name: viirs_crefl06
- # attributes:
-# physical_element:
-# raw_value: 1.61 um CR
- # viirs_crefl07:
- # name: viirs_crefl07
- # attributes:
-# physical_element:
-# raw_value: 2.25 um CR
- # viirs_crefl08:
- # name: viirs_crefl08
- # attributes:
-# physical_element:
-# raw_value: 0.64 um CR
- # viirs_crefl09:
- # name: viirs_crefl09
- # attributes:
-# physical_element:
-# raw_value: 0.87 um CR
- # viirs_crefl10:
- # name: viirs_crefl10
- # attributes:
-# physical_element:
-# raw_value: 1.61 um CR
-
- # MODIS L1B Products
-# modis_vis01:
-# name: vis01
-# physical_element: 0.65 um
-# modis_vis02:
-# name: vis02
-# physical_element: 0.86 um
-# modis_vis03:
-# name: vis03
-# physical_element: 0.47 um
-# modis_vis04:
-# name: vis04
-# physical_element: 0.56 um
-# modis_vis05:
-# name: vis05
-# physical_element: 1.24 um
-# modis_vis06:
-# name: vis06
-# physical_element: 1.64 um
-# modis_vis07:
-# name: vis07
-# physical_element: 2.13 um
-# modis_vis26:
-# name: vis26
-# physical_element: 1.38 um
-# modis_bt20:
-# name: bt20
-# physical_element: 3.75 um
-# modis_bt21:
-# name: bt21
-# physical_element: Fire
-# modis_bt22:
-# name: bt22
-# physical_element: 3.96 um
-# modis_bt23:
-# name: bt23
-# physical_element: 4.05 um
-# modis_bt24:
-# name: bt24
-# physical_element: 4.47 um
-# modis_bt25:
-# name: bt25
-# physical_element: 4.52 um
-# modis_bt27:
-# name: bt27
-# physical_element: 6.7 um
-# modis_bt28:
-# name: bt28
-# physical_element: 7.3 um
-# modis_bt29:
-# name: bt29
-# physical_element: 8.6 um
-# modis_bt30:
-# name: bt30
-# physical_element: 9.7 um
-# modis_bt31:
-# name: bt31
-# physical_element: 11.0 um
-# modis_bt32:
-# name: bt32
-# physical_element: 12.0 um
-# modis_bt33:
-# name: bt33
-# physical_element: 13.3 um
-# modis_bt34:
-# name: bt34
-# physical_element: 13.6 um
-# modis_bt35:
-# name: bt35
-# physical_element: 13.9 um
-# modis_bt36:
-# name: bt36
-# physical_element: 14.2 um
-# modis_sst:
-# name: sst
-# physical_element: SST
-# modis_lst:
-# name: lst
-# physical_element: LST
-# modis_slst:
-# name: slst
-# physical_element: LSTSUM
-# modis_fog:
-# name: ssec_fog
-# physical_element: Fog
-# modis_ctt:
-# name: ctt
-# physical_element: CTT
-# modis_ndvi:
-# name: ndvi
-# physical_element: NDVI
-# modis_tpw:
-# name: tpw
-# physical_element: TPW
-# modis_ice_concentration:
-# name: ice_concentration
-# physical_element: Ice Concentration
-# modis_ist:
-# name: ist
-# physical_element: Ice Surface Temperature
-
- # MODIS L1B Corrected Reflectances
-# modis_crefl01_250m:
-# name: modis_crefl01_250m
-# physical_element: 0.65 um CR
-# modis_crefl01_500m:
-# name: modis_crefl01_250m
-# physical_element: 0.65 um CR
-# modis_crefl01_1000m:
-# name: modis_crefl01_1000m
-# physical_element: 0.65 um CR
-# modis_crefl02_250m:
-# name: modis_crefl02_250m
-# physical_element: 0.86 um CR
-# modis_crefl02_500m:
-# name: modis_crefl02_500m
-# physical_element: 0.86 um CR
-# modis_crefl02_1000m:
-# name: modis_crefl02_1000m
-# physical_element: 0.86 um CR
-# modis_crefl03_250m:
-# name: modis_crefl03_250m
-# physical_element: 0.47 um CR
-# modis_crefl03_500m:
-# name: modis_crefl03_500m
-# physical_element: 0.47 um CR
-# modis_crefl03_1000m:
-# name: modis_crefl03_1000m
-# physical_element: 0.47 um CR
-# modis_crefl04_250m:
-# name: modis_crefl04_250m
-# physical_element: 0.56 um CR
-# modis_crefl04_500m:
-# name: modis_crefl04_500m
-# physical_element: 0.56 um CR
-# modis_crefl04_1000m:
-# name: modis_crefl04_1000m
-# physical_element: 0.56 um CR
-# modis_crefl05_500m:
-# name: modis_crefl05_500m
-# physical_element: 1.24 um CR
-# modis_crefl05_1000m:
-# name: modis_crefl05_1000m
-# physical_element: 1.24 um CR
-# modis_crefl06_500m:
-# name: modis_crefl06_500m
-# physical_element: 1.64 um CR
-# modis_crefl06_1000m:
-# name: modis_crefl06_1000m
-# physical_element: 1.64 um CR
-# modis_crefl07_500m:
-# name: modis_crefl07_500m
-# physical_element: 2.13 um CR
-# modis_crefl07_1000m:
-# name: modis_crefl07_1000m
-# physical_element: 2.13 um CR
-
- # MIRS Products
-# mirs_btemp_23v:
-# name: btemp_23v
-# physical_element: MIRS 23 GHZ V
-# mirs_btemp_31v:
-# name: btemp_31v
-# physical_element: MIRS 31 GHZ V
-# mirs_btemp_50h:
-# name: btemp_50h
-# physical_element: MIRS 50 GHZ H
-# mirs_btemp_51h:
-# name: btemp_51h
-# physical_element: MIRS 51 GHZ H
-# mirs_btemp_52h:
-# name: btemp_52h
-# physical_element: MIRS 52 GHZ H
-# mirs_btemp_53h:
-# name: btemp_53h
-# physical_element: MIRS 53 GHZ H
-# mirs_btemp_54h1:
-# name: btemp_54h1
-# physical_element: MIRS 54 GHZ H-1
-# mirs_btemp_54h2:
-# name: btemp_54h2
-# physical_element: MIRS 54 GHZ H-2
-# mirs_btemp_55h:
-# name: btemp_55h
-# physical_element: MIRS 55 GHZ H
-# mirs_btemp_57h1:
-# name: btemp_57h1
-# physical_element: MIRS 57 GHZ H-1
-# mirs_btemp_57h2:
-# name: btemp_57h2
-# physical_element: MIRS 57 GHZ H-2
-# mirs_btemp_57h3:
-# name: btemp_57h3
-# physical_element: MIRS 57 GHZ H-3
-# mirs_btemp_57h4:
-# name: btemp_57h4
-# physical_element: MIRS 57 GHZ H-4
-# mirs_btemp_57h5:
-# name: btemp_57h5
-# physical_element: MIRS 57 GHZ H-5
-# mirs_btemp_57h6:
-# name: btemp_57h6
-# physical_element: MIRS 57 GHZ H-6
-# mirs_btemp_88v:
-# name: btemp_88v
-# physical_element: MIRS 88 GHZ V
-# mirs_btemp_165h:
-# name: btemp_165h
-# physical_element: MIRS 165 GHZ H
-# mirs_btemp_183h1:
-# name: btemp_183h1
-# physical_element: MIRS 183 GHZ H-1
-# mirs_btemp_183h2:
-# name: btemp_183h2
-# physical_element: MIRS 183 GHZ H-2
-# mirs_btemp_183h3:
-# name: btemp_183h3
-# physical_element: MIRS 183 GHZ H-3
-# mirs_btemp_183h4:
-# name: btemp_183h4
-# physical_element: MIRS 183 GHZ H-4
-# mirs_btemp_183h5:
-# name: btemp_183h5
-# physical_element: MIRS 183 GHZ H-5
- # MIRS BTs - NOAA-18 - AMSU-A MHS
- # MIRS BTs - NOAA-19 - AMSU-A MHS
- # MIRS BTs - M1 (metopb) - AMSU-A MHS
- # MIRS BTs - M2 (metopa) - AMSU-A MHS
-# mirs_btemp_50v:
-# name: btemp_50v
-# physical_element: MIRS 50 GHZ V
-# mirs_btemp_52v:
-# name: btemp_52v
-# physical_element: MIRS 52 GHZ V
-# mirs_btemp_54h:
-# name: btemp_54h
-# physical_element: MIRS 54 GHZ H
-# mirs_btemp_54v:
-# name: btemp_54v
-# physical_element: MIRS 54 GHZ V
-# mirs_btemp_89v1:
-# name: btemp_89v1
-# physical_element: MIRS 89 GHZ V-1
-# mirs_btemp_89v2:
-# name: btemp_89v2
-# physical_element: MIRS 89 GHZ V-2
- # 157h on OPSO NOAA site
-# mirs_btemp_157v:
-# name: btemp_157v
-# physical_element: MIRS 157 GHZ V
-# mirs_btemp_190v:
-# name: btemp_190v
-# physical_element: MIRS 190 GHZ V
-# mirs_rain_rate:
-# reader: mirs
-# name: rain_rate
-# physical_element: MIRS Rain Rate
-# mirs_snow_cover:
-# reader: mirs
-# name: snow_cover
-# physical_element: MIRS Snow Cover
-# mirs_sea_ice:
-# reader: mirs
-# name: sea_ice
-# physical_element: MIRS Sea Ice
-# mirs_swe:
-# reader: mirs
-# name: swe
-# physical_element: MIRS SWE
-# mirs_clw:
-# reader: mirs
-# name: clw
-# physical_element: MIRS CLW
-# mirs_tpw:
-# reader: mirs
-# name: tpw
-# physical_element: MIRS TPW
-# mirs_tskin:
-# reader: mirs
-# name: tskin
-# physical_element: MIRS Skin Temperature
+ units: {}
# AMSR-2 L1B
amsr2_btemp_36.5h:
@@ -735,36 +457,42 @@ templates:
attributes:
physical_element:
raw_value: 36.5 GHz H
+ units: {}
amsr2_btemp_36.5v:
name: btemp_36.5v
var_name: data
attributes:
physical_element:
raw_value: 36.5 GHz V
+ units: {}
amsr2_btemp_89.0ah:
name: btemp_89.0ah
var_name: data
attributes:
physical_element:
raw_value: 89.0 GHz AH
+ units: {}
amsr2_btemp_89.0av:
name: btemp_89.0av
var_name: data
attributes:
physical_element:
raw_value: 89.0 GHz AV
+ units: {}
amsr2_btemp_89.0bh:
name: btemp_89.0bh
var_name: data
attributes:
physical_element:
raw_value: 89.0 GHz BH
+ units: {}
amsr2_btemp_89.0bv:
name: btemp_89.0bv
var_name: data
attributes:
physical_element:
raw_value: 89.0 GHz BV
+ units: {}
# GEOCAT Level 1 Products
geocat_surface_type:
@@ -773,6 +501,7 @@ templates:
attributes:
physical_element:
raw_value: Surface Type
+ units: {}
# GEOCAT Level 2 Products
glm_l2_radc:
single_variable: false
@@ -801,17 +530,15 @@ templates:
coordinates:
x:
attributes:
- units: {}
- standard_name:
- value: '{standard_name}'
+ units:
+ value: "{units}"
encoding:
dtype: "int16"
# _Unsigned: "true"
y:
attributes:
- units: {}
- standard_name:
- value: '{standard_name}'
+ units:
+ value: "{units}"
encoding:
dtype: "int16"
# _Unsigned: "true"
@@ -842,6 +569,10 @@ templates:
attributes:
# physical_element:
# raw_value: "GLM_Flash_Extent_Density"
+ # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min"
+ # but this is not understood by AWIPS
+ units:
+ raw_value: "1"
standard_name:
value: "{standard_name}"
long_name:
@@ -855,6 +586,10 @@ templates:
attributes:
# physical_element:
# raw_value: "GLM_Flash_Extent_Density_Window"
+ # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min"
+ # but this is not understood by AWIPS
+ units:
+ raw_value: "1"
standard_name:
value: "{standard_name}"
long_name:
@@ -976,17 +711,15 @@ templates:
coordinates:
x:
attributes:
- units: {}
- standard_name:
- value: '{standard_name}'
+ units:
+ value: "{units}"
encoding:
dtype: "int16"
# _Unsigned: "true"
y:
attributes:
- units: {}
- standard_name:
- value: '{standard_name}'
+ units:
+ value: "{units}"
encoding:
dtype: "int16"
# _Unsigned: "true"
@@ -1017,6 +750,10 @@ templates:
attributes:
# physical_element:
# raw_value: "GLM_Flash_Extent_Density"
+ # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min"
+ # but this is not understood by AWIPS
+ units:
+ raw_value: "1"
standard_name:
value: "{standard_name}"
long_name:
@@ -1033,6 +770,10 @@ templates:
attributes:
# physical_element:
# raw_value: "GLM_Flash_Extent_Density_Window"
+ # units are actually "Count per nominal 3136 microradian^2 pixel per 1.0 min"
+ # but this is not understood by AWIPS
+ units:
+ raw_value: "1"
standard_name:
value: "{standard_name}"
long_name:
diff --git a/satpy/etc/writers/geotiff.yaml b/satpy/etc/writers/geotiff.yaml
index 9c8f2b6b07..e2f16daa9d 100644
--- a/satpy/etc/writers/geotiff.yaml
+++ b/satpy/etc/writers/geotiff.yaml
@@ -4,4 +4,4 @@ writer:
writer: !!python/name:satpy.writers.geotiff.GeoTIFFWriter
filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif'
compress: DEFLATE
- zlevel: 6
\ No newline at end of file
+ zlevel: 6
diff --git a/satpy/etc/writers/ninjogeotiff.yaml b/satpy/etc/writers/ninjogeotiff.yaml
new file mode 100644
index 0000000000..e6c3afe881
--- /dev/null
+++ b/satpy/etc/writers/ninjogeotiff.yaml
@@ -0,0 +1,7 @@
+writer:
+ name: ninjogeotiff
+ description: GeoTIFF Writer with NinJo tags in GDALMetaData
+ writer: !!python/name:satpy.writers.ninjogeotiff.NinJoGeoTIFFWriter
+ filename: '{name}_{start_time:%Y%m%d_%H%M%S}.tif'
+ compress: DEFLATE
+ zlevel: 6
diff --git a/satpy/modifiers/__init__.py b/satpy/modifiers/__init__.py
index 985326c611..a0888167b3 100644
--- a/satpy/modifiers/__init__.py
+++ b/satpy/modifiers/__init__.py
@@ -19,7 +19,11 @@
# file deepcode ignore W0611: Ignore unused imports in init module
-from .base import ModifierBase # noqa: F401
-from .spectral import NIREmissivePartFromReflectance, NIRReflectance # noqa: F401
-from .geometry import SunZenithCorrector, EffectiveSolarPathLengthCorrector # noqa: F401
-from .atmosphere import PSPRayleighReflectance, PSPAtmosphericalCorrection, CO2Corrector # noqa: F401
+from .base import ModifierBase # noqa: F401, isort: skip
+from .atmosphere import CO2Corrector # noqa: F401
+from .atmosphere import PSPAtmosphericalCorrection # noqa: F401
+from .atmosphere import PSPRayleighReflectance # noqa: F401
+from .geometry import EffectiveSolarPathLengthCorrector # noqa: F401
+from .geometry import SunZenithCorrector # noqa: F401
+from .spectral import NIREmissivePartFromReflectance # noqa: F401
+from .spectral import NIRReflectance # noqa: F401
diff --git a/satpy/modifiers/_crefl.py b/satpy/modifiers/_crefl.py
index 0dfe2f5b6f..de71d96147 100644
--- a/satpy/modifiers/_crefl.py
+++ b/satpy/modifiers/_crefl.py
@@ -21,11 +21,10 @@
import warnings
import numpy as np
-import xarray as xr
-from dask import array as da
+
from satpy.aux_download import DataDownloadMixin, retrieve
from satpy.modifiers import ModifierBase
-from satpy.utils import get_satpos
+from satpy.modifiers.angles import get_angles
LOG = logging.getLogger(__name__)
@@ -73,34 +72,21 @@ def _get_registered_dem_cache_key(self):
def __call__(self, datasets, optional_datasets, **info):
"""Create modified DataArray object by applying the crefl algorithm."""
- from satpy.composites.crefl_utils import get_coefficients
- refl_data, *angles = self._get_data_and_angles(datasets, optional_datasets)
- coefficients = get_coefficients(refl_data.attrs["sensor"],
- refl_data.attrs["wavelength"],
- refl_data.attrs["resolution"])
- results = self._call_crefl(refl_data, coefficients, angles)
+ refl_data, angles = self._extract_angle_data_arrays(datasets, optional_datasets)
+ results = self._call_crefl(refl_data, angles)
info.update(refl_data.attrs)
info["rayleigh_corrected"] = True
results.attrs = info
self.apply_modifier_info(refl_data, results)
return results
- def _call_crefl(self, refl_data, coefficients, angles):
- from satpy.composites.crefl_utils import run_crefl
+ def _call_crefl(self, refl_data, angles):
+ from satpy.modifiers._crefl_utils import run_crefl
avg_elevation = self._get_average_elevation()
- lons, lats = refl_data.attrs['area'].get_lonlats(chunks=refl_data.chunks)
- is_percent = refl_data.attrs["units"] == "%"
- use_abi = refl_data.attrs['sensor'] == 'abi'
results = run_crefl(refl_data,
- coefficients,
- lons,
- lats,
*angles,
avg_elevation=avg_elevation,
- percent=is_percent,
- use_abi=use_abi)
- factor = 100. if is_percent else 1.
- results = results * factor
+ )
return results
def _get_average_elevation(self):
@@ -110,64 +96,54 @@ def _get_average_elevation(self):
LOG.debug("Loading CREFL averaged elevation information from: %s",
self.dem_cache_key)
local_filename = retrieve(self.dem_cache_key)
+ avg_elevation = self._read_var_from_hdf4_file(local_filename, self.dem_sds).astype(np.float64)
+ if isinstance(avg_elevation, np.ma.MaskedArray):
+ avg_elevation = avg_elevation.filled(np.nan)
+ return avg_elevation
+
+ @staticmethod
+ def _read_var_from_hdf4_file(local_filename, var_name):
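+        # Prefer pyhdf for reading the HDF4 DEM file; fall back to netCDF4
+        # (which must be compiled with HDF4 support) if pyhdf is missing
+        # (ImportError) or fails to open the file (OSError).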
+ try:
+ return ReflectanceCorrector._read_var_from_hdf4_file_pyhdf(local_filename, var_name)
+ except (ImportError, OSError):
+ return ReflectanceCorrector._read_var_from_hdf4_file_netcdf4(local_filename, var_name)
+
+ @staticmethod
+ def _read_var_from_hdf4_file_netcdf4(local_filename, var_name):
from netCDF4 import Dataset as NCDataset
+
# HDF4 file, NetCDF library needs to be compiled with HDF4 support
nc = NCDataset(local_filename, "r")
# average elevation is stored as a 16-bit signed integer but with
# scale factor 1 and offset 0, convert it to float here
- avg_elevation = nc.variables[self.dem_sds][:].astype(np.float64)
- if isinstance(avg_elevation, np.ma.MaskedArray):
- avg_elevation = avg_elevation.filled(np.nan)
- return avg_elevation
-
- def _get_data_and_angles(self, datasets, optional_datasets):
- angles = self._extract_angle_data_arrays(datasets, optional_datasets)
- angles = [xr.DataArray(dask_arr, dims=('y', 'x')) for dask_arr in angles]
- return [datasets[0]] + angles
+ return nc.variables[var_name][:]
+
+ @staticmethod
+ def _read_var_from_hdf4_file_pyhdf(local_filename, var_name):
+ from pyhdf.SD import SD, SDC
+ f = SD(local_filename, SDC.READ)
+ var = f.select(var_name)
+ data = var[:]
+ fill = ReflectanceCorrector._read_fill_value_from_hdf4(var, data.dtype)
+ return np.ma.MaskedArray(data, data == fill)
+
+ @staticmethod
+ def _read_fill_value_from_hdf4(var, dtype):
+ from pyhdf.error import HDF4Error
+ try:
+ return var.getfillvalue()
+ except HDF4Error:
+ return np.iinfo(dtype).min
def _extract_angle_data_arrays(self, datasets, optional_datasets):
all_datasets = datasets + optional_datasets
if len(all_datasets) == 1:
vis = self.match_data_arrays(datasets)[0]
- return self.get_angles(vis)
+ return vis, get_angles(vis)
if len(all_datasets) == 5:
vis, *angles = self.match_data_arrays(
datasets + optional_datasets)
- # get the dask array underneath
- return [data_arr.data for data_arr in angles]
+ return vis, angles
raise ValueError("Not sure how to handle provided dependencies. "
"Either all 4 angles must be provided or none of "
"of them.")
-
- def get_angles(self, vis):
- """Get sun and satellite angles to use in crefl calculations."""
- lons, lats = self._get_valid_lonlats(vis)
- sun_angles = self._get_sun_angles(vis, lons, lats)
- sat_angles = self._get_sensor_angles(vis, lons, lats)
- # sata, satz, suna, sunz
- return sat_angles + sun_angles
-
- def _get_valid_lonlats(self, vis):
- lons, lats = vis.attrs['area'].get_lonlats(chunks=vis.data.chunks)
- lons = da.where(lons >= 1e30, np.nan, lons)
- lats = da.where(lats >= 1e30, np.nan, lats)
- return lons, lats
-
- def _get_sun_angles(self, vis, lons, lats):
- from pyorbital.astronomy import get_alt_az, sun_zenith_angle
- suna = get_alt_az(vis.attrs['start_time'], lons, lats)[1]
- suna = np.rad2deg(suna)
- sunz = sun_zenith_angle(vis.attrs['start_time'], lons, lats)
- return suna, sunz
-
- def _get_sensor_angles(self, vis, lons, lats):
- from pyorbital.orbital import get_observer_look
- sat_lon, sat_lat, sat_alt = get_satpos(vis)
- sata, satel = get_observer_look(
- sat_lon,
- sat_lat,
- sat_alt / 1000.0, # km
- vis.attrs['start_time'],
- lons, lats, 0)
- satz = 90 - satel
- return sata, satz
diff --git a/satpy/modifiers/_crefl_utils.py b/satpy/modifiers/_crefl_utils.py
new file mode 100644
index 0000000000..ddde8c9765
--- /dev/null
+++ b/satpy/modifiers/_crefl_utils.py
@@ -0,0 +1,627 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2010-2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Shared utilities for correcting reflectance data using the 'crefl' algorithm.
+
+The CREFL algorithm in this module is based on the `NASA CREFL SPA`_ software,
+the `NASA CVIIRS SPA`_, and customizations of these algorithms for ABI/AHI by
+Ralph Kuehn and Min Oo at the Space Science and Engineering Center (SSEC).
+
+The CREFL SPA documentation page describes the algorithm by saying:
+
+ The CREFL_SPA processes MODIS Aqua and Terra Level 1B DB data to create the
+ MODIS Level 2 Corrected Reflectance product. The algorithm performs a simple
+ atmospheric correction with MODIS visible, near-infrared, and short-wave
+ infrared bands (bands 1 through 16).
+
+ It corrects for molecular (Rayleigh) scattering and gaseous absorption (water
+ vapor and ozone) using climatological values for gas contents. It requires no
+ real-time input of ancillary data. The algorithm performs no aerosol
+ correction. The Corrected Reflectance products created by CREFL_SPA are very
+ similar to the MODIS Land Surface Reflectance product (MOD09) in clear
+ atmospheric conditions, since the algorithms used to derive both are based on
+ the 6S Radiative Transfer Model. The products show differences in the presence
+ of aerosols, however, because the MODIS Land Surface Reflectance product uses
+ a more complex atmospheric correction algorithm that includes a correction for
+ aerosols.
+
+The additional logic to support ABI (AHI support not included) was originally
+written by Ralph Kuehn and Min Oo at SSEC. Additional modifications were
+performed by Martin Raspaud, David Hoese, and Will Roberts to make the code
+work together and be more dask compatible.
+
+The AHI/ABI implementation is based on the MODIS collection 6 algorithm, where
+a spherical-shell atmosphere was assumed rather than a plane-parallel. See
+Appendix A in: "The Collection 6 MODIS aerosol products over land and ocean"
+Atmos. Meas. Tech., 6, 2989–3034, 2013 www.atmos-meas-tech.net/6/2989/2013/
+:doi:`10.5194/amt-6-2989-2013`.
+
+
+The original CREFL code is similar to what is described in appendix A1 (page
+74) of the ATBD for the `MODIS MOD04/MYD04`_ data product.
+
+.. _NASA CREFL SPA: https://directreadout.sci.gsfc.nasa.gov/?id=dspContent&cid=92&type=software
+.. _NASA CVIIRS SPA: https://directreadout.sci.gsfc.nasa.gov/?id=dspContent&cid=277&type=software
+.. _MODIS MOD04/MYD04: https://modis.gsfc.nasa.gov/data/atbd/atbd_mod02.pdf
+
+
+"""
+from __future__ import annotations
+
+import logging
+from typing import Optional, Type, Union
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+
+from satpy.dataset.dataid import WavelengthRange
+
+LOG = logging.getLogger(__name__)
+
+UO3_MODIS = 0.319
+UH2O_MODIS = 2.93
+UO3_VIIRS = 0.285
+UH2O_VIIRS = 2.93
+
+MAXSOLZ = 86.5
+MAXAIRMASS = 18
+SCALEHEIGHT = 8000
+FILL_INT16 = 32767
+TAUSTEP4SPHALB_ABI = .0003
+TAUSTEP4SPHALB = .0001
+
+MAXNUMSPHALBVALUES = 4000 # with no aerosol taur <= 0.4 in all bands everywhere
+REFLMIN = -0.01
+REFLMAX = 1.6
+
+
+class _Coefficients:
+ LUTS: list[np.ndarray] = []
+ # resolution -> wavelength -> coefficient index
+ # resolution -> band name -> coefficient index
+ COEFF_INDEX_MAP: dict[int, dict[Union[tuple, str], int]] = {}
+
+ def __init__(self, wavelength_range, resolution=0):
+ self._wv_range = wavelength_range
+ self._resolution = resolution
+
+ def __call__(self):
+ idx = self._find_coefficient_index(self._wv_range, resolution=self._resolution)
+ band_luts = [lut_array[idx] for lut_array in self.LUTS]
+ return band_luts
+
+ def _find_coefficient_index(self, wavelength_range, resolution=0):
+        """Return index into coefficient arrays for this band's wavelength.
+
+        This function searches through the `COEFF_INDEX_MAP` dictionary and
+ finds the first key where the nominal wavelength of `wavelength_range`
+ falls between the minimum wavelength and maximum wavelength of the key.
+ `wavelength_range` can also be the standard name of the band. For
+ example, "M05" for VIIRS or "1" for MODIS.
+
+ Args:
+ wavelength_range: 3-element tuple of
+ (min wavelength, nominal wavelength, max wavelength) or the
+ string name of the band.
+ resolution: resolution of the band to be corrected
+
+ Returns:
+            index into coefficient arrays like `aH2O`, `aO3`, etc.
+            A ValueError is raised if no matching wavelength is found.
+
+ """
+ index_map = self.COEFF_INDEX_MAP
+ # Find the best resolution of coefficients
+ for res in sorted(index_map.keys()):
+ if resolution <= res:
+ index_map = index_map[res]
+ break
+ else:
+            raise ValueError(f"Unrecognized data resolution: {resolution}")
+ # Find the best wavelength of coefficients
+ if isinstance(wavelength_range, str):
+ # wavelength range is actually a band name
+ return index_map[wavelength_range]
+ for lut_wvl_range, v in index_map.items():
+ if isinstance(lut_wvl_range, str):
+ # we are analyzing wavelengths and ignoring dataset names
+ continue
+ if wavelength_range[1] in lut_wvl_range:
+ return v
+ raise ValueError(f"Can't find LUT for {wavelength_range}.")
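+
+# Lookup example (illustrative): with the VIIRS tables below, a band whose
+# nominal wavelength is 0.64 um at 500 m resolution resolves to index 7
+# (I01), because 500 m is the smallest resolution key that is >= the data
+# resolution and 0.64 falls within WavelengthRange(0.600, 0.6400, 0.680).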
+
+
+class _ABICoefficients(_Coefficients):
+ RG_FUDGE = .55 # This number is what Ralph says "looks good" for ABI/AHI
+ LUTS = [
+ # aH2O
+ np.array([2.4111e-003, 7.8454e-003 * RG_FUDGE, 7.9258e-3, 9.3392e-003, 2.53e-2]),
+ # aO2 (bH2O for other instruments)
+ np.array([1.2360e-003, 3.7296e-003, 177.7161e-006, 10.4899e-003, 1.63e-2]),
+ # aO3
+ np.array([4.2869e-003, 25.6509e-003 * RG_FUDGE, 802.4319e-006, 0.0000e+000, 2e-5]),
+ # taur0
+ np.array([184.7200e-003, 52.3490e-003, 15.8450e-003, 1.3074e-003, 311.2900e-006]),
+ ]
+ # resolution -> wavelength -> coefficient index
+ # resolution -> band name -> coefficient index
+ COEFF_INDEX_MAP = {
+ 2000: {
+ WavelengthRange(0.450, 0.470, 0.490): 0, # C01
+ "C01": 0,
+ WavelengthRange(0.590, 0.640, 0.690): 1, # C02
+ "C02": 1,
+ WavelengthRange(0.8455, 0.865, 0.8845): 2, # C03
+ "C03": 2,
+ # WavelengthRange((1.3705, 1.378, 1.3855)): None, # C04 - No coefficients yet
+ # "C04": None,
+ WavelengthRange(1.580, 1.610, 1.640): 3, # C05
+ "C05": 3,
+ WavelengthRange(2.225, 2.250, 2.275): 4, # C06
+ "C06": 4
+ },
+ }
+
+
+class _VIIRSCoefficients(_Coefficients):
+ # Values from crefl 1.7.1
+ LUTS = [
+ # aH2O
+ np.array([0.000406601, 0.0015933, 0, 1.78644e-05, 0.00296457, 0.000617252, 0.000996563, 0.00222253, 0.00094005,
+ 0.000563288, 0, 0, 0, 0, 0, 0]),
+ # bH2O
+ np.array([0.812659, 0.832931, 1., 0.8677850, 0.806816, 0.944958, 0.78812, 0.791204, 0.900564, 0.942907, 0, 0,
+ 0, 0, 0, 0]),
+ # aO3
+ np.array([0.0433461, 0.0, 0.0178299, 0.0853012, 0, 0, 0, 0.0813531, 0, 0, 0.0663, 0.0836, 0.0485, 0.0395,
+ 0.0119, 0.00263]),
+ # taur0
+ np.array([0.04350, 0.01582, 0.16176, 0.09740, 0.00369, 0.00132, 0.00033, 0.05373, 0.01561, 0.00129, 0.1131,
+ 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]),
+ ]
+ # resolution -> wavelength -> coefficient index
+ # resolution -> band name -> coefficient index
+ COEFF_INDEX_MAP = {
+ 1000: {
+ WavelengthRange(0.662, 0.6720, 0.682): 0, # M05
+ "M05": 0,
+ WavelengthRange(0.846, 0.8650, 0.885): 1, # M07
+ "M07": 1,
+ WavelengthRange(0.478, 0.4880, 0.498): 2, # M03
+ "M03": 2,
+ WavelengthRange(0.545, 0.5550, 0.565): 3, # M04
+ "M04": 3,
+ WavelengthRange(1.230, 1.2400, 1.250): 4, # M08
+ "M08": 4,
+ WavelengthRange(1.580, 1.6100, 1.640): 5, # M10
+ "M10": 5,
+ WavelengthRange(2.225, 2.2500, 2.275): 6, # M11
+ "M11": 6,
+ },
+ 500: {
+ WavelengthRange(0.600, 0.6400, 0.680): 7, # I01
+ "I01": 7,
+ WavelengthRange(0.845, 0.8650, 0.884): 8, # I02
+ "I02": 8,
+ WavelengthRange(1.580, 1.6100, 1.640): 9, # I03
+ "I03": 9,
+ },
+ }
+
+
+class _MODISCoefficients(_Coefficients):
+ # Values from crefl 1.7.1
+ LUTS = [
+ # aH2O
+ np.array([-5.60723, -5.25251, 0, 0, -6.29824, -7.70944, -3.91877, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
+ # bH2O
+ np.array([0.820175, 0.725159, 0, 0, 0.865732, 0.966947, 0.745342, 0, 0, 0, 0, 0, 0, 0, 0, 0]),
+ # aO3
+ np.array([0.0715289, 0, 0.00743232, 0.089691, 0, 0, 0, 0.001, 0.00383, 0.0225, 0.0663,
+ 0.0836, 0.0485, 0.0395, 0.0119, 0.00263]),
+ # taur0
+ np.array([0.05100, 0.01631, 0.19325, 0.09536, 0.00366, 0.00123, 0.00043, 0.3139, 0.2375, 0.1596, 0.1131,
+ 0.0994, 0.0446, 0.0416, 0.0286, 0.0155]),
+ ]
+ # Map of pixel resolutions -> wavelength -> coefficient index
+ # Map of pixel resolutions -> band name -> coefficient index
+ COEFF_INDEX_MAP = {
+ 1000: {
+ WavelengthRange(0.620, 0.6450, 0.670): 0,
+ "1": 0,
+ WavelengthRange(0.841, 0.8585, 0.876): 1,
+ "2": 1,
+ WavelengthRange(0.459, 0.4690, 0.479): 2,
+ "3": 2,
+ WavelengthRange(0.545, 0.5550, 0.565): 3,
+ "4": 3,
+ WavelengthRange(1.230, 1.2400, 1.250): 4,
+ "5": 4,
+ WavelengthRange(1.628, 1.6400, 1.652): 5,
+ "6": 5,
+ WavelengthRange(2.105, 2.1300, 2.155): 6,
+ "7": 6,
+ }
+ }
+ COEFF_INDEX_MAP[500] = COEFF_INDEX_MAP[1000]
+ COEFF_INDEX_MAP[250] = COEFF_INDEX_MAP[1000]
+
+
+def run_crefl(refl,
+ sensor_azimuth,
+ sensor_zenith,
+ solar_azimuth,
+ solar_zenith,
+ avg_elevation=None,
+ ):
+ """Run main crefl algorithm.
+
+ All input parameters are per-pixel values meaning they are the same size
+ and shape as the input reflectance data, unless otherwise stated.
+
+ :param refl: tuple of reflectance band arrays
+ :param sensor_azimuth: input swath sensor azimuth angle array
+ :param sensor_zenith: input swath sensor zenith angle array
+ :param solar_azimuth: input swath solar azimuth angle array
+ :param solar_zenith: input swath solar zenith angle array
+ :param avg_elevation: average elevation (usually pre-calculated and stored in CMGDEM.hdf)
+
+ """
+ runner_cls = _runner_class_for_sensor(refl.attrs['sensor'])
+ runner = runner_cls(refl)
+ corr_refl = runner(sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation)
+ return corr_refl
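+
+
+# A minimal usage sketch (hypothetical inputs; within Satpy the reflectance
+# and angle arrays are supplied by the ReflectanceCorrector modifier). The
+# reflectance DataArray must carry "sensor", "wavelength", "resolution", and
+# "units" attributes, and the four angle arrays are same-shaped xarray
+# DataArrays in degrees (sata/satz/suna/sunz are placeholder names here)::
+#
+#     import dask.array as da
+#     import xarray as xr
+#     refl = xr.DataArray(
+#         da.random.random((100, 100), chunks=50) * 100.0,
+#         dims=("y", "x"),
+#         attrs={"sensor": "viirs", "units": "%", "resolution": 500,
+#                "wavelength": (0.600, 0.640, 0.680)})
+#     corr = run_crefl(refl, sata, satz, suna, sunz, avg_elevation=None)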
+
+
+class _CREFLRunner:
+ def __init__(self, refl_data_arr):
+ self._is_percent = refl_data_arr.attrs["units"] == "%"
+ if self._is_percent:
+ attrs = refl_data_arr.attrs
+ refl_data_arr = refl_data_arr / 100.0
+ refl_data_arr.attrs = attrs
+ self._refl = refl_data_arr
+
+ @property
+ def coeffs_cls(self) -> Type[_Coefficients]:
+ raise NotImplementedError()
+
+ def __call__(self, sensor_azimuth, sensor_zenith, solar_azimuth, solar_zenith, avg_elevation):
+ refl = self._refl
+ height = self._height_from_avg_elevation(avg_elevation)
+ coeffs_helper = self.coeffs_cls(refl.attrs["wavelength"], refl.attrs["resolution"])
+ coeffs = coeffs_helper()
+ mus = np.cos(np.deg2rad(solar_zenith))
+ mus = mus.where(mus >= 0)
+ muv = np.cos(np.deg2rad(sensor_zenith))
+ phi = solar_azimuth - sensor_azimuth
+ corr_refl = self._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs)
+ if self._is_percent:
+ corr_refl = corr_refl * 100.0
+ return xr.DataArray(corr_refl, dims=refl.dims, coords=refl.coords, attrs=refl.attrs)
+
+ def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs):
+ raise NotImplementedError()
+
+ def _height_from_avg_elevation(self, avg_elevation: Optional[np.ndarray]) -> da.Array:
+ """Get digital elevation map data for our granule with ocean fill value set to 0."""
+ if avg_elevation is None:
+ LOG.debug("No average elevation information provided in CREFL")
+ # height = np.zeros(lon.shape, dtype=np.float64)
+ height = 0.
+ else:
+ LOG.debug("Using average elevation information provided to CREFL")
+ lon, lat = self._refl.attrs['area'].get_lonlats(chunks=self._refl.chunks)
+ height = da.map_blocks(_space_mask_height, lon, lat, avg_elevation,
+ chunks=lon.chunks, dtype=avg_elevation.dtype)
+ return height
+
+
+class _ABICREFLRunner(_CREFLRunner):
+ @property
+ def coeffs_cls(self) -> Type[_Coefficients]:
+ return _ABICoefficients
+
+ def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs):
+ LOG.debug("Using ABI CREFL algorithm")
+ return da.map_blocks(_run_crefl_abi, self._refl.data, mus.data, muv.data, phi.data,
+ solar_zenith.data, sensor_zenith.data, height, *coeffs,
+ meta=np.ndarray((), dtype=self._refl.dtype),
+ chunks=self._refl.chunks, dtype=self._refl.dtype,
+ )
+
+
+class _VIIRSMODISCREFLRunner(_CREFLRunner):
+ def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs):
+ return da.map_blocks(_run_crefl, self._refl.data, mus.data, muv.data, phi.data,
+ height, self._refl.attrs.get("sensor"), *coeffs,
+ meta=np.ndarray((), dtype=self._refl.dtype),
+ chunks=self._refl.chunks, dtype=self._refl.dtype,
+ )
+
+
+class _VIIRSCREFLRunner(_VIIRSMODISCREFLRunner):
+ @property
+ def coeffs_cls(self) -> Type[_Coefficients]:
+ return _VIIRSCoefficients
+
+ def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs):
+ LOG.debug("Using VIIRS CREFL algorithm")
+ return super()._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs)
+
+
+class _MODISCREFLRunner(_VIIRSMODISCREFLRunner):
+ @property
+ def coeffs_cls(self) -> Type[_Coefficients]:
+ return _MODISCoefficients
+
+ def _run_crefl(self, mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs):
+ LOG.debug("Using MODIS CREFL algorithm")
+ return super()._run_crefl(mus, muv, phi, solar_zenith, sensor_zenith, height, coeffs)
+
+
+_SENSOR_TO_RUNNER = {
+ "abi": _ABICREFLRunner,
+ "viirs": _VIIRSCREFLRunner,
+ "modis": _MODISCREFLRunner,
+}
+
+
+def _runner_class_for_sensor(sensor_name: str) -> Type[_CREFLRunner]:
+ try:
+ return _SENSOR_TO_RUNNER[sensor_name]
+ except KeyError:
+ raise NotImplementedError(f"Don't know how to apply CREFL to data from sensor {sensor_name}.")
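+
+
+# For example, _runner_class_for_sensor("viirs") returns _VIIRSCREFLRunner,
+# while any sensor name missing from _SENSOR_TO_RUNNER raises
+# NotImplementedError.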
+
+
+def _space_mask_height(lon, lat, avg_elevation):
+ lat[(lat <= -90) | (lat >= 90)] = np.nan
+ lon[(lon <= -180) | (lon >= 180)] = np.nan
+ row = ((90.0 - lat) * avg_elevation.shape[0] / 180.0).astype(np.int32)
+ col = ((lon + 180.0) * avg_elevation.shape[1] / 360.0).astype(np.int32)
+ space_mask = np.isnan(lon) | np.isnan(lat)
+ row[space_mask] = 0
+ col[space_mask] = 0
+
+ height = avg_elevation[row, col]
+ # negative heights aren't allowed, clip to 0
+ height[(height < 0.0) | np.isnan(height) | space_mask] = 0.0
+ return height
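+
+
+# Index mapping example: on a 3600 x 7200 global DEM grid (0.05 degree
+# cells), the point (lat=45, lon=-90) maps to row = (90 - 45) * 3600 / 180
+# = 900 and col = (-90 + 180) * 7200 / 360 = 1800.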
+
+
+def _run_crefl(refl, mus, muv, phi, height, sensor_name, *coeffs):
+ atm_vars_cls = _VIIRSAtmosphereVariables if sensor_name.lower() == "viirs" else _MODISAtmosphereVariables
+ atm_vars = atm_vars_cls(mus, muv, phi, height, *coeffs)
+ sphalb, rhoray, TtotraytH2O, tOG = atm_vars()
+ return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb)
+
+
+def _run_crefl_abi(refl, mus, muv, phi, solar_zenith, sensor_zenith, height,
+ *coeffs):
+ a_O3 = [268.45, 0.5, 115.42, -3.2922]
+ a_H2O = [0.0311, 0.1, 92.471, -1.3814]
+ a_O2 = [0.4567, 0.007, 96.4884, -1.6970]
+ G_O3 = _G_calc(solar_zenith, a_O3) + _G_calc(sensor_zenith, a_O3)
+ G_H2O = _G_calc(solar_zenith, a_H2O) + _G_calc(sensor_zenith, a_H2O)
+ G_O2 = _G_calc(solar_zenith, a_O2) + _G_calc(sensor_zenith, a_O2)
+ # Note: bh2o values are actually ao2 values for abi
+ atm_vars = _ABIAtmosphereVariables(G_O3, G_H2O, G_O2,
+ mus, muv, phi, height, *coeffs)
+ sphalb, rhoray, TtotraytH2O, tOG = atm_vars()
+ return _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb)
+
+
+def _G_calc(zenith, a_coeff):
+ return (np.cos(np.deg2rad(zenith))+(a_coeff[0]*(zenith**a_coeff[1])*(a_coeff[2]-zenith)**a_coeff[3]))**-1
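+
+
+# _G_calc evaluates a geometric air-mass factor of the form
+# G(theta) = 1 / (cos(theta) + a0 * theta**a1 * (a2 - theta)**a3),
+# which follows 1 / cos(theta) at small zenith angles but stays finite as
+# theta approaches 90 degrees.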
+
+
+def _correct_refl(refl, tOG, rhoray, TtotraytH2O, sphalb):
+ corr_refl = (refl / tOG - rhoray) / TtotraytH2O
+ corr_refl /= (1.0 + corr_refl * sphalb)
+ return corr_refl.clip(REFLMIN, REFLMAX)
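+
+
+# The correction above applies the 6S-style two-step formulation:
+#     rho' = (rho / tOG - rhoray) / TtotraytH2O
+#     rho_corr = rho' / (1 + rho' * sphalb)
+# where tOG is the combined ozone/oxygen gaseous transmittance, rhoray the
+# Rayleigh path reflectance, TtotraytH2O the total Rayleigh transmittance
+# times the water-vapor transmittance, and sphalb the atmospheric spherical
+# albedo; the result is clipped to [REFLMIN, REFLMAX].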
+
+
+class _AtmosphereVariables:
+ def __init__(self, mus, muv, phi, height, ah2o, bh2o, ao3, tau):
+ self._mus = mus
+ self._muv = muv
+ self._phi = phi
+ self._height = height
+ self._ah2o = ah2o
+ self._bh2o = bh2o
+ self._ao3 = ao3
+ self._tau = tau
+ self._taustep4sphalb = TAUSTEP4SPHALB
+
+ def __call__(self):
+ tau_step = np.linspace(
+ self._taustep4sphalb,
+ MAXNUMSPHALBVALUES * self._taustep4sphalb,
+ MAXNUMSPHALBVALUES)
+ sphalb0 = _csalbr(tau_step)
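+        # Scale the sea-level Rayleigh optical depth to the pixel's terrain
+        # height, assuming an exponential pressure profile with an 8 km
+        # (SCALEHEIGHT) scale height.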
+ taur = self._tau * np.exp(-self._height / SCALEHEIGHT)
+ rhoray, trdown, trup = _chand(self._phi, self._muv, self._mus, taur)
+ sphalb = sphalb0[(taur / self._taustep4sphalb + 0.5).astype(np.int32)]
+ Ttotrayu = ((2 / 3. + self._muv) + (2 / 3. - self._muv) * trup) / (4 / 3. + taur)
+ Ttotrayd = ((2 / 3. + self._mus) + (2 / 3. - self._mus) * trdown) / (4 / 3. + taur)
+
+ tH2O = self._get_th2o()
+ TtotraytH2O = Ttotrayu * Ttotrayd * tH2O
+
+ tO2 = self._get_to2()
+ tO3 = self._get_to3()
+ tOG = tO3 * tO2
+ return sphalb, rhoray, TtotraytH2O, tOG
+
+ def _get_to2(self):
+ return 1.0
+
+ def _get_to3(self):
+ raise NotImplementedError()
+
+ def _get_th2o(self):
+ raise NotImplementedError()
+
+
+class _ABIAtmosphereVariables(_AtmosphereVariables):
+ def __init__(self, G_O3, G_H2O, G_O2, *args):
+ super().__init__(*args)
+ self._G_O3 = G_O3
+ self._G_H2O = G_H2O
+ self._G_O2 = G_O2
+ self._taustep4sphalb = TAUSTEP4SPHALB_ABI
+
+ def _get_to2(self):
+ # NOTE: bh2o is actually ao2 for ABI
+ return np.exp(-self._G_O2 * self._bh2o)
+
+ def _get_to3(self):
+ return np.exp(-self._G_O3 * self._ao3) if self._ao3 != 0 else 1.0
+
+ def _get_th2o(self):
+ return np.exp(-self._G_H2O * self._ah2o) if self._ah2o != 0 else 1.0
+
+
+class _VIIRSAtmosphereVariables(_AtmosphereVariables):
+ def __init__(self, *args):
+ super().__init__(*args)
+ self._airmass = self._compute_airmass()
+
+ def _compute_airmass(self):
+ air_mass = 1.0 / self._mus + 1 / self._muv
+ air_mass[air_mass > MAXAIRMASS] = -1.0
+ return air_mass
+
+ def _get_to3(self):
+ if self._ao3 == 0:
+ return 1.0
+ return np.exp(-self._airmass * UO3_VIIRS * self._ao3)
+
+ def _get_th2o(self):
+ if self._bh2o == 0:
+ return 1.0
+ return np.exp(-(self._ah2o * ((self._airmass * UH2O_VIIRS) ** self._bh2o)))
+
+
+class _MODISAtmosphereVariables(_VIIRSAtmosphereVariables):
+ def _get_to3(self):
+ if self._ao3 == 0:
+ return 1.0
+ return np.exp(-self._airmass * UO3_MODIS * self._ao3)
+
+ def _get_th2o(self):
+ if self._bh2o == 0:
+ return 1.0
+ return np.exp(-np.exp(self._ah2o + self._bh2o * np.log(self._airmass * UH2O_MODIS)))
+
+
+def _csalbr(tau):
+ # Previously 3 functions csalbr fintexp1, fintexp3
+ a = [-.57721566, 0.99999193, -0.24991055, 0.05519968, -0.00976004,
+ 0.00107857]
+ # xx = a[0] + a[1] * tau + a[2] * tau**2 + a[3] * tau**3 + a[4] * tau**4 + a[5] * tau**5
+ # xx = np.polyval(a[::-1], tau)
+
+ # xx = a[0]
+ # xftau = 1.0
+ # for i in xrange(5):
+ # xftau = xftau*tau
+ # xx = xx + a[i] * xftau
+ fintexp1 = np.polyval(a[::-1], tau) - np.log(tau)
+ fintexp3 = (np.exp(-tau) * (1.0 - tau) + tau**2 * fintexp1) / 2.0
+
+ return (3.0 * tau - fintexp3 *
+ (4.0 + 2.0 * tau) + 2.0 * np.exp(-tau)) / (4.0 + 3.0 * tau)
+
+
+def _chand(phi, muv, mus, taur):
+ # FROM FUNCTION CHAND
+ # phi: azimuthal difference between sun and observation in degree
+ # (phi=0 in backscattering direction)
+ # mus: cosine of the sun zenith angle
+ # muv: cosine of the observation zenith angle
+ # taur: molecular optical depth
+ # rhoray: molecular path reflectance
+ # constant xdep: depolarization factor (0.0279)
+ # xfd = (1-xdep/(2-xdep)) / (1 + 2*xdep/(2-xdep)) = 2 * (1 - xdep) / (2 + xdep) = 0.958725775
+ xfd = 0.958725775
+ xbeta2 = 0.5
+ # float pl[5];
+ # double fs01, fs02, fs0, fs1, fs2;
+ as0 = [0.33243832, 0.16285370, -0.30924818, -0.10324388, 0.11493334,
+ -6.777104e-02, 1.577425e-03, -1.240906e-02, 3.241678e-02,
+ -3.503695e-02]
+ as1 = [0.19666292, -5.439061e-02]
+ as2 = [0.14545937, -2.910845e-02]
+ # float phios, xcos1, xcos2, xcos3;
+ # float xph1, xph2, xph3, xitm1, xitm2;
+ # float xlntaur, xitot1, xitot2, xitot3;
+ # int i, ib;
+
+ xph1 = 1.0 + (3.0 * mus * mus - 1.0) * (3.0 * muv * muv - 1.0) * xfd / 8.0
+ xph2 = -xfd * xbeta2 * 1.5 * mus * muv * np.sqrt(
+ 1.0 - mus * mus) * np.sqrt(1.0 - muv * muv)
+ xph3 = xfd * xbeta2 * 0.375 * (1.0 - mus * mus) * (1.0 - muv * muv)
+
+ # pl[0] = 1.0
+ # pl[1] = mus + muv
+ # pl[2] = mus * muv
+ # pl[3] = mus * mus + muv * muv
+ # pl[4] = mus * mus * muv * muv
+
+ fs01 = as0[0] + (mus + muv) * as0[1] + (mus * muv) * as0[2] + (
+ mus * mus + muv * muv) * as0[3] + (mus * mus * muv * muv) * as0[4]
+ fs02 = as0[5] + (mus + muv) * as0[6] + (mus * muv) * as0[7] + (
+ mus * mus + muv * muv) * as0[8] + (mus * mus * muv * muv) * as0[9]
+ # for (i = 0; i < 5; i++) {
+ # fs01 += (double) (pl[i] * as0[i]);
+ # fs02 += (double) (pl[i] * as0[5 + i]);
+ # }
+
+ # for refl, (ah2o, bh2o, ao3, tau) in zip(reflectance_bands, coefficients):
+
+ # ib = _find_coefficient_index(center_wl)
+ # if ib is None:
+ # raise ValueError("Can't handle band with wavelength '{}'".format(center_wl))
+
+ xlntaur = np.log(taur)
+
+ fs0 = fs01 + fs02 * xlntaur
+ fs1 = as1[0] + xlntaur * as1[1]
+ fs2 = as2[0] + xlntaur * as2[1]
+ del xlntaur, fs01, fs02
+
+ trdown = np.exp(-taur / mus)
+ trup = np.exp(-taur / muv)
+
+ xitm1 = (1.0 - trdown * trup) / 4.0 / (mus + muv)
+ xitm2 = (1.0 - trdown) * (1.0 - trup)
+ xitot1 = xph1 * (xitm1 + xitm2 * fs0)
+ xitot2 = xph2 * (xitm1 + xitm2 * fs1)
+ xitot3 = xph3 * (xitm1 + xitm2 * fs2)
+ del xph1, xph2, xph3, xitm1, xitm2, fs0, fs1, fs2
+
+ phios = np.deg2rad(phi + 180.0)
+ xcos1 = 1.0
+ xcos2 = np.cos(phios)
+ xcos3 = np.cos(2.0 * phios)
+ del phios
+
+ rhoray = xitot1 * xcos1 + xitot2 * xcos2 * 2.0 + xitot3 * xcos3 * 2.0
+ return rhoray, trdown, trup
diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py
new file mode 100644
index 0000000000..302c0ceb39
--- /dev/null
+++ b/satpy/modifiers/angles.py
@@ -0,0 +1,537 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Utilties for getting various angles for a dataset.."""
+from __future__ import annotations
+
+import hashlib
+import os
+import shutil
+import warnings
+from datetime import datetime
+from functools import update_wrapper
+from glob import glob
+from typing import Any, Callable, Optional, Union
+
+import dask
+import numpy as np
+import xarray as xr
+from dask import array as da
+from pyorbital.astronomy import cos_zen as pyob_cos_zen
+from pyorbital.astronomy import get_alt_az
+from pyorbital.orbital import get_observer_look
+from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition
+
+import satpy
+from satpy.utils import PerformanceWarning, get_satpos, ignore_invalid_float_warnings
+
+PRGeometry = Union[SwathDefinition, AreaDefinition, StackedAreaDefinition]
+
+# Arbitrary time used when computing sensor angles that is passed to
+# pyorbital's get_observer_look function.
+# The difference is on the order of 1e-10 at most as time changes so we force
+# it to a single time for easier caching. It is *only* used if caching.
+STATIC_EARTH_INERTIAL_DATETIME = datetime(2000, 1, 1, 12, 0, 0)
+DEFAULT_UNCACHE_TYPES = (SwathDefinition, xr.DataArray, da.Array)
+HASHABLE_GEOMETRIES = (AreaDefinition, StackedAreaDefinition)
+
+
+class ZarrCacheHelper:
+ """Helper for caching function results to on-disk zarr arrays.
+
+ It is recommended to use this class through the :func:`cache_to_zarr_if`
+ decorator rather than using it directly.
+
+ Currently the cache does not perform any limiting or removal of cache
+ content. That is left up to the user to manage. Caching is based on
+ arguments passed to the decorated function but will only be performed
+ if the arguments are of a certain type (see ``uncacheable_arg_types``).
+ The cache value to use is purely based on the hash value of all of the
+ provided arguments along with the "cache version" (see below).
+
+ Note that the zarr format requires regular chunking of data. That is,
+ chunks must be all the same size per dimension except for the last chunk.
+ To work around this limitation, this class will determine a good regular
+ chunking based on the existing chunking scheme, rechunk the input
+ arguments, and then rechunk the results before returning them to the user.
+ This rechunking is only done if caching is enabled.
+
+ Args:
+ func: Function that will be called to generate the value to cache.
+ cache_config_key: Name of the boolean ``satpy.config`` parameter to
+ use to determine if caching should be done.
+ uncacheable_arg_types: Types that if present in the passed arguments
+ should trigger caching to *not* happen. By default this includes
+ ``SwathDefinition``, ``xr.DataArray``, and ``da.Array`` objects.
+ sanitize_args_func: Optional function to call to sanitize provided
+ arguments before they are considered for caching. This can be used
+ to make arguments more "cacheable" by replacing them with similar
+ values that will result in more cache hits. Note that the sanitized
+ arguments are only passed to the underlying function if caching
+ will be performed, otherwise the original arguments are passed.
+ cache_version: Version number used to distinguish one version of a
+ decorated function from future versions.
+
+ Notes:
+ * Caching only supports dask array values.
+
+ * This helper allows for an additional ``cache_dir`` parameter to
+ override the use of the ``satpy.config`` ``cache_dir`` parameter.
+
+ Examples:
+ To use through the :func:`cache_to_zarr_if` decorator::
+
+ @cache_to_zarr_if("cache_my_stuff")
+ def generate_my_stuff(area_def: AreaDefinition, some_factor: int) -> da.Array:
+ # Generate
+ return my_dask_arr
+
+ To use the decorated function::
+
+ with satpy.config.set(cache_my_stuff=True):
+ my_stuff = generate_my_stuff(area_def, 5)
+
+ """
+
+ def __init__(self,
+ func: Callable,
+ cache_config_key: str,
+ uncacheable_arg_types=DEFAULT_UNCACHE_TYPES,
+ sanitize_args_func: Callable = None,
+ cache_version: int = 1,
+ ):
+ """Hold on to provided arguments for future use."""
+ self._func = func
+ self._cache_config_key = cache_config_key
+ self._uncacheable_arg_types = uncacheable_arg_types
+ self._sanitize_args_func = sanitize_args_func
+ self._cache_version = cache_version
+
+ def cache_clear(self, cache_dir: Optional[str] = None):
+ """Remove all on-disk files associated with this function.
+
+ Intended to mimic the :func:`functools.cache` behavior.
+ """
+ cache_dir = self._get_cache_dir_from_config(cache_dir)
+ zarr_pattern = self._zarr_pattern("*", cache_version="*").format("*")
+ for zarr_dir in glob(os.path.join(cache_dir, zarr_pattern)):
+ shutil.rmtree(zarr_dir, ignore_errors=True)
+
+    def _zarr_pattern(self, arg_hash, cache_version: Optional[Union[int, str]] = None) -> str:
+ if cache_version is None:
+ cache_version = self._cache_version
+ return f"{self._func.__name__}_v{cache_version}" + "_{}_" + f"{arg_hash}.zarr"
+
+ def __call__(self, *args, cache_dir: Optional[str] = None) -> Any:
+ """Call the decorated function."""
+ new_args = self._sanitize_args_func(*args) if self._sanitize_args_func is not None else args
+ arg_hash = _hash_args(*new_args, unhashable_types=self._uncacheable_arg_types)
+ should_cache, cache_dir = self._get_should_cache_and_cache_dir(new_args, cache_dir)
+ zarr_fn = self._zarr_pattern(arg_hash)
+ zarr_format = os.path.join(cache_dir, zarr_fn)
+ zarr_paths = glob(zarr_format.format("*"))
+ if not should_cache or not zarr_paths:
+ # use sanitized arguments if we are caching, otherwise use original arguments
+ args_to_use = new_args if should_cache else args
+ res = self._func(*args_to_use)
+ if should_cache and not zarr_paths:
+ self._warn_if_irregular_input_chunks(args, args_to_use)
+ self._cache_results(res, zarr_format)
+ # if we did any caching, let's load from the zarr files
+ if should_cache:
+ # re-calculate the cached paths
+ zarr_paths = sorted(glob(zarr_format.format("*")))
+ if not zarr_paths:
+ raise RuntimeError("Data was cached to disk but no files were found")
+ new_chunks = _get_output_chunks_from_func_arguments(args)
+ res = tuple(da.from_zarr(zarr_path, chunks=new_chunks) for zarr_path in zarr_paths)
+ return res
+
+ def _get_should_cache_and_cache_dir(self, args, cache_dir: Optional[str]) -> tuple[bool, str]:
+ should_cache: bool = satpy.config.get(self._cache_config_key, False)
+ can_cache = not any(isinstance(arg, self._uncacheable_arg_types) for arg in args)
+ should_cache = should_cache and can_cache
+ cache_dir = self._get_cache_dir_from_config(cache_dir)
+ return should_cache, cache_dir
+
+ @staticmethod
+ def _get_cache_dir_from_config(cache_dir: Optional[str]) -> str:
+ cache_dir = cache_dir or satpy.config.get("cache_dir")
+ if cache_dir is None:
+ raise RuntimeError("Can't use zarr caching. No 'cache_dir' configured.")
+ return cache_dir
+
+ @staticmethod
+ def _warn_if_irregular_input_chunks(args, modified_args):
+ arg_chunks = _get_output_chunks_from_func_arguments(args)
+ new_chunks = _get_output_chunks_from_func_arguments(modified_args)
+ if _chunks_are_irregular(arg_chunks):
+ warnings.warn(
+ "Calling cached function with irregular dask chunks. The data "
+ "has been rechunked for caching, but this is not optimal for "
+ "future calculations. "
+ f"Original chunks: {arg_chunks}; New chunks: {new_chunks}",
+ PerformanceWarning)
+
+ def _cache_results(self, res, zarr_format):
+ os.makedirs(os.path.dirname(zarr_format), exist_ok=True)
+ new_res = []
+ for idx, sub_res in enumerate(res):
+ if not isinstance(sub_res, da.Array):
+ raise ValueError("Zarr caching currently only supports dask "
+ f"arrays. Got {type(sub_res)}")
+ zarr_path = zarr_format.format(idx)
+ # See https://github.com/dask/dask/issues/8380
+ with dask.config.set({"optimization.fuse.active": False}):
+ new_sub_res = sub_res.to_zarr(zarr_path, compute=False)
+ new_res.append(new_sub_res)
+ # actually compute the storage to zarr
+ da.compute(new_res)
+
+
+def _get_output_chunks_from_func_arguments(args):
+ """Determine what the desired output chunks are.
+
+    It is assumed that a tuple of tuples of integers defines the chunk sizes.
+    If no such tuple is found, the arguments are checked for array-like
+    objects with a ``.chunks`` attribute.
+
+ """
+ chunked_args = [arg for arg in args if hasattr(arg, "chunks")]
+ tuple_args = [arg for arg in args if _is_chunk_tuple(arg)]
+ if not tuple_args and not chunked_args:
+ raise RuntimeError("Cannot determine desired output chunksize for cached function.")
+ new_chunks = tuple_args[-1] if tuple_args else chunked_args[0].chunks
+ return new_chunks
+
+
+def cache_to_zarr_if(
+ cache_config_key: str,
+ uncacheable_arg_types=DEFAULT_UNCACHE_TYPES,
+        sanitize_args_func: Optional[Callable] = None,
+) -> Callable:
+ """Decorate a function and cache the results as a zarr array on disk.
+
+ This only happens if the ``satpy.config`` boolean value for the provided
+ key is ``True`` as well as some other conditions. See
+ :class:`ZarrCacheHelper` for more information. Most importantly, this
+ decorator does not limit how many items can be cached and does not clear
+ out old entries. It is up to the user to manage the size of the cache.
+
+ """
+ def _decorator(func: Callable) -> Callable:
+ zarr_cacher = ZarrCacheHelper(func,
+ cache_config_key,
+ uncacheable_arg_types,
+ sanitize_args_func)
+ wrapper = update_wrapper(zarr_cacher, func)
+ return wrapper
+ return _decorator
+
+
+def _hash_args(*args, unhashable_types=DEFAULT_UNCACHE_TYPES):
+ import json
+ hashable_args = []
+ for arg in args:
+ if isinstance(arg, unhashable_types):
+ continue
+ if isinstance(arg, HASHABLE_GEOMETRIES):
+ arg = hash(arg)
+ elif isinstance(arg, datetime):
+ arg = arg.isoformat(" ")
+ hashable_args.append(arg)
+ arg_hash = hashlib.sha1() # nosec
+ arg_hash.update(json.dumps(tuple(hashable_args)).encode('utf8'))
+ return arg_hash.hexdigest()
+
+
+def _sanitize_observer_look_args(*args):
+ new_args = []
+ for arg in args:
+ if isinstance(arg, datetime):
+ new_args.append(STATIC_EARTH_INERTIAL_DATETIME)
+ elif isinstance(arg, (float, np.float64, np.float32)):
+ # round floating point numbers to nearest tenth
+ new_args.append(round(arg, 1))
+ elif _is_chunk_tuple(arg) and _chunks_are_irregular(arg):
+ new_chunks = _regular_chunks_from_irregular_chunks(arg)
+ new_args.append(new_chunks)
+ else:
+ new_args.append(arg)
+ return new_args
+
+
+def _sanitize_args_with_chunks(*args):
+ new_args = []
+ for arg in args:
+ if _is_chunk_tuple(arg) and _chunks_are_irregular(arg):
+ new_chunks = _regular_chunks_from_irregular_chunks(arg)
+ new_args.append(new_chunks)
+ else:
+ new_args.append(arg)
+ return new_args
+
+
+def _is_chunk_tuple(some_obj: Any) -> bool:
+ if not isinstance(some_obj, tuple):
+ return False
+ if not all(isinstance(sub_obj, tuple) for sub_obj in some_obj):
+ return False
+ sub_elements = [sub_obj_elem for sub_obj in some_obj for sub_obj_elem in sub_obj]
+ return all(isinstance(sub_obj_elem, int) for sub_obj_elem in sub_elements)
+
+
+def _regular_chunks_from_irregular_chunks(
+ old_chunks: tuple[tuple[int, ...], ...]
+) -> tuple[tuple[int, ...], ...]:
+ shape = tuple(sum(dim_chunks) for dim_chunks in old_chunks)
+ new_dim_chunks = tuple(max(dim_chunks) for dim_chunks in old_chunks)
+ return da.core.normalize_chunks(new_dim_chunks, shape=shape)
+
+
+def _chunks_are_irregular(chunks_tuple: tuple) -> bool:
+ """Determine if an array is irregularly chunked.
+
+ Zarr does not support saving data in irregular chunks. Regular chunking
+ is when all chunks are the same size (except for the last one).
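+    For example, ``((2, 2, 1), (3, 3))`` is regular, while ``((2, 1, 2), (3, 3))``
+    is irregular because a non-final chunk along the first dimension differs
+    in size.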
+
+ """
+ return any(len(set(chunks[:-1])) > 1 for chunks in chunks_tuple)
+
+
+def _geo_dask_to_data_array(arr: da.Array) -> xr.DataArray:
+ return xr.DataArray(arr, dims=('y', 'x'))
+
+
+def compute_relative_azimuth(sat_azi: xr.DataArray, sun_azi: xr.DataArray) -> xr.DataArray:
+ """Compute the relative azimuth angle.
+
+ Args:
+        sat_azi: DataArray of satellite azimuth angles, typically in the 0-360 degree range.
+        sun_azi: DataArray of solar azimuth angles, in the same range as ``sat_azi``.
+
+    Returns:
+ A DataArray containing the relative azimuth angle in the 0-180 degree range.
+
+ NOTE: Relative azimuth is defined such that:
+ Relative azimuth is 0 when sun and satellite are aligned on one side of a pixel (back scatter).
+ Relative azimuth is 180 when sun and satellite are directly opposite each other (forward scatter).
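+
+    A worked example with plain numbers for illustration (the real inputs
+    are DataArrays)::
+
+        # |350 - 10| = 340; min(340, 360 - 340) = 20 degrees relative azimuth
+        rel_azi = compute_relative_azimuth(sat_azi, sun_azi)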
+ """
+ ssadiff = np.absolute(sun_azi - sat_azi)
+ ssadiff = np.minimum(ssadiff, 360 - ssadiff)
+
+ return ssadiff
+
+
+def get_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray, xr.DataArray, xr.DataArray]:
+ """Get sun and satellite azimuth and zenith angles.
+
+    Note that this function can benefit from the ``satpy.config`` parameters
+    ``cache_lonlats`` and ``cache_sensor_angles`` being set to ``True``.
+
+ Args:
+ data_arr: DataArray to get angles for. Information extracted from this
+            object are ``.attrs["area"]``, ``.attrs["start_time"]``, and
+ ``.attrs["orbital_parameters"]``. See :func:`satpy.utils.get_satpos`
+ and :ref:`dataset_metadata` for more information.
+ Additionally, the dask array chunk size is used when generating
+ new arrays. The actual data of the object is not used.
+
+ Returns:
+ Four DataArrays representing sensor azimuth angle, sensor zenith angle,
+ solar azimuth angle, and solar zenith angle. All values are in degrees.
+        Both the sensor and solar azimuth angles are provided in the
+        [0, 360] degree range; zenith angles are in the [0, 180] degree range.
+
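+        Example, with the relevant caching options enabled (the ``cache_dir``
+        value is illustrative)::
+
+            import satpy
+            with satpy.config.set(cache_lonlats=True, cache_sensor_angles=True,
+                                  cache_dir="/path/to/cache"):
+                sata, satz, suna, sunz = get_angles(data_arr)
+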
+ """
+ sata, satz = _get_sensor_angles(data_arr)
+ suna, sunz = _get_sun_angles(data_arr)
+ return sata, satz, suna, sunz
+
+
+def get_satellite_zenith_angle(data_arr: xr.DataArray) -> xr.DataArray:
+ """Generate satellite zenith angle for the provided data.
+
+    Note that this function can benefit from the ``satpy.config`` parameters
+    ``cache_lonlats`` and ``cache_sensor_angles`` being set to ``True``.
+    Values are in degrees.
+
+ """
+ satz = _get_sensor_angles(data_arr)[1]
+ return satz
+
+
+def get_cos_sza(data_arr: xr.DataArray) -> xr.DataArray:
+ """Generate the cosine of the solar zenith angle for the provided data.
+
+ Returns:
+ DataArray with the same shape as ``data_arr``.
+
+ """
+ chunks = _geo_chunks_from_data_arr(data_arr)
+ lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks)
+ cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats)
+ return _geo_dask_to_data_array(cos_sza)
+
+
+@cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks)
+def _get_valid_lonlats(area: PRGeometry, chunks: Union[int, str, tuple] = "auto") -> tuple[da.Array, da.Array]:
+ with ignore_invalid_float_warnings():
+ lons, lats = area.get_lonlats(chunks=chunks)
+ lons = da.where(lons >= 1e30, np.nan, lons)
+ lats = da.where(lats >= 1e30, np.nan, lats)
+ return lons, lats
+
+
+def _get_sun_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]:
+ chunks = _geo_chunks_from_data_arr(data_arr)
+ lons, lats = _get_valid_lonlats(data_arr.attrs["area"], chunks)
+ suna = da.map_blocks(_get_sun_azimuth_ndarray, lons, lats,
+ data_arr.attrs["start_time"],
+ dtype=lons.dtype, meta=np.array((), dtype=lons.dtype),
+ chunks=lons.chunks)
+ cos_sza = _get_cos_sza(data_arr.attrs["start_time"], lons, lats)
+ sunz = np.rad2deg(np.arccos(cos_sza))
+ suna = _geo_dask_to_data_array(suna)
+ sunz = _geo_dask_to_data_array(sunz)
+ return suna, sunz
+
+
+def _get_cos_sza(utc_time, lons, lats):
+ cos_sza = da.map_blocks(_cos_zen_ndarray,
+ lons, lats, utc_time,
+ meta=np.array((), dtype=lons.dtype),
+ dtype=lons.dtype,
+ chunks=lons.chunks)
+ return cos_sza
+
+
+def _cos_zen_ndarray(lons, lats, utc_time):
+ with ignore_invalid_float_warnings():
+ return pyob_cos_zen(utc_time, lons, lats)
+
+
+def _get_sun_azimuth_ndarray(lons: np.ndarray, lats: np.ndarray, start_time: datetime) -> np.ndarray:
+ with ignore_invalid_float_warnings():
+ suna = get_alt_az(start_time, lons, lats)[1]
+ suna = np.rad2deg(suna)
+
+ # The get_alt_az function returns values in the range -180 to 180 degrees.
+ # Satpy expects values in the 0 - 360 range, which is what is returned for the
+ # satellite azimuth angles.
+ # Here this is corrected so both sun and sat azimuths are in the same range.
+ suna = suna % 360.
+ return suna
+
+
+def _get_sensor_angles(data_arr: xr.DataArray) -> tuple[xr.DataArray, xr.DataArray]:
+ preference = satpy.config.get('sensor_angles_position_preference', 'actual')
+ sat_lon, sat_lat, sat_alt = get_satpos(data_arr, preference=preference)
+ area_def = data_arr.attrs["area"]
+ chunks = _geo_chunks_from_data_arr(data_arr)
+ sata, satz = _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt,
+ data_arr.attrs["start_time"],
+ area_def, chunks)
+ sata = _geo_dask_to_data_array(sata)
+ satz = _geo_dask_to_data_array(satz)
+ return sata, satz
+
+
+def _geo_chunks_from_data_arr(data_arr: xr.DataArray) -> tuple:
+ x_dim_index = _dim_index_with_default(data_arr.dims, "x", -1)
+ y_dim_index = _dim_index_with_default(data_arr.dims, "y", -2)
+ chunks = (data_arr.chunks[y_dim_index], data_arr.chunks[x_dim_index])
+ return chunks
+
+
+def _dim_index_with_default(dims: tuple, dim_name: str, default: int) -> int:
+ try:
+ return dims.index(dim_name)
+ except ValueError:
+ return default
+
+
+@cache_to_zarr_if("cache_sensor_angles", sanitize_args_func=_sanitize_observer_look_args)
+def _get_sensor_angles_from_sat_pos(sat_lon, sat_lat, sat_alt, start_time, area_def, chunks):
+ lons, lats = _get_valid_lonlats(area_def, chunks)
+ res = da.map_blocks(_get_sensor_angles_ndarray, lons, lats, start_time, sat_lon, sat_lat, sat_alt,
+ dtype=lons.dtype, meta=np.array((), dtype=lons.dtype), new_axis=[0],
+ chunks=(2,) + lons.chunks)
+ return res[0], res[1]
+
+
+def _get_sensor_angles_ndarray(lons, lats, start_time, sat_lon, sat_lat, sat_alt) -> np.ndarray:
+ with ignore_invalid_float_warnings():
+ sata, satel = get_observer_look(
+ sat_lon,
+ sat_lat,
+ sat_alt / 1000.0, # km
+ start_time,
+ lons, lats, 0)
+ satz = 90 - satel
+ return np.stack([sata, satz])
+
+
+def sunzen_corr_cos(data: da.Array,
+ cos_zen: da.Array,
+ limit: float = 88.,
+ max_sza: Optional[float] = 95.) -> da.Array:
+ """Perform Sun zenith angle correction.
+
+ The correction is based on the provided cosine of the zenith
+ angle (``cos_zen``). The correction is limited
+ to ``limit`` degrees (default: 88.0 degrees). For larger zenith
+ angles, the correction is the same as at the ``limit`` if ``max_sza``
+ is `None`. The default behavior is to gradually reduce the correction
+ past ``limit`` degrees up to ``max_sza`` where the correction becomes
+ 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape.
+
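+    A minimal sketch, assuming ``band_data`` is a 2D dask array and
+    ``sza_degrees`` holds the matching solar zenith angles in degrees::
+
+        cos_zen = np.cos(np.deg2rad(sza_degrees))
+        corrected = sunzen_corr_cos(band_data, cos_zen)
+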
+ """
+ return da.map_blocks(_sunzen_corr_cos_ndarray,
+ data, cos_zen, limit, max_sza,
+ meta=np.array((), dtype=data.dtype),
+ chunks=data.chunks)
+
+
+def _sunzen_corr_cos_ndarray(data: np.ndarray,
+ cos_zen: np.ndarray,
+ limit: float,
+ max_sza: Optional[float]) -> np.ndarray:
+ # Convert the zenith angle limit to cosine of zenith angle
+ limit_rad = np.deg2rad(limit)
+ limit_cos = np.cos(limit_rad)
+ max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza
+
+ # Cosine correction
+ corr = 1. / cos_zen
+ if max_sza is not None:
+ # gradually fall off for larger zenith angle
+ grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad)
+ # invert the factor so maximum correction is done at `limit` and falls off later
+ with np.errstate(invalid='ignore'): # we expect space pixels to be invalid
+ grad_factor = 1. - np.log(grad_factor + 1) / np.log(2)
+ # make sure we don't make anything negative
+ grad_factor = grad_factor.clip(0.)
+ else:
+ # Use constant value (the limit) for larger zenith angles
+ grad_factor = 1.
+ corr = np.where(cos_zen > limit_cos, corr, grad_factor / limit_cos)
+ # Force "night" pixels to 0 (where SZA is invalid)
+ corr[np.isnan(cos_zen)] = 0
+ return data * corr
diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py
index d49949508d..25d6104c64 100644
--- a/satpy/modifiers/atmosphere.py
+++ b/satpy/modifiers/atmosphere.py
@@ -18,15 +18,14 @@
"""Modifiers related to atmospheric corrections or adjustments."""
import logging
-from weakref import WeakValueDictionary
-import numpy as np
import dask.array as da
+import numpy as np
import xarray as xr
from satpy.modifiers import ModifierBase
from satpy.modifiers._crefl import ReflectanceCorrector # noqa
-from satpy.utils import get_satpos
+from satpy.modifiers.angles import compute_relative_azimuth, get_angles, get_satellite_zenith_angle
logger = logging.getLogger(__name__)
@@ -34,30 +33,6 @@
class PSPRayleighReflectance(ModifierBase):
"""Pyspectral-based rayleigh corrector for visible channels."""
- _rayleigh_cache = WeakValueDictionary()
-
- def get_angles(self, vis):
- """Get the sun and satellite angles from the current dataarray."""
- from pyorbital.astronomy import get_alt_az, sun_zenith_angle
- from pyorbital.orbital import get_observer_look
-
- lons, lats = vis.attrs['area'].get_lonlats(chunks=vis.data.chunks)
- lons = da.where(lons >= 1e30, np.nan, lons)
- lats = da.where(lats >= 1e30, np.nan, lats)
- sunalt, suna = get_alt_az(vis.attrs['start_time'], lons, lats)
- suna = np.rad2deg(suna)
- sunz = sun_zenith_angle(vis.attrs['start_time'], lons, lats)
-
- sat_lon, sat_lat, sat_alt = get_satpos(vis)
- sata, satel = get_observer_look(
- sat_lon,
- sat_lat,
- sat_alt / 1000.0, # km
- vis.attrs['start_time'],
- lons, lats, 0)
- satz = 90 - satel
- return sata, satz, suna, sunz
-
def __call__(self, projectables, optional_datasets=None, **info):
"""Get the corrected reflectance when removing Rayleigh scattering.
@@ -66,39 +41,31 @@ def __call__(self, projectables, optional_datasets=None, **info):
from pyspectral.rayleigh import Rayleigh
if not optional_datasets or len(optional_datasets) != 4:
vis, red = self.match_data_arrays(projectables)
- sata, satz, suna, sunz = self.get_angles(vis)
- red.data = da.rechunk(red.data, vis.data.chunks)
+ sata, satz, suna, sunz = get_angles(vis)
else:
vis, red, sata, satz, suna, sunz = self.match_data_arrays(
projectables + optional_datasets)
- sata, satz, suna, sunz = optional_datasets
- # get the dask array underneath
- sata = sata.data
- satz = satz.data
- suna = suna.data
- sunz = sunz.data
-
- # First make sure the two azimuth angles are in the range 0-360:
- sata = sata % 360.
- suna = suna % 360.
- ssadiff = da.absolute(suna - sata)
- ssadiff = da.minimum(ssadiff, 360 - ssadiff)
+ # First make sure the two azimuth angles are in the range 0-360:
+ sata = sata % 360.
+ suna = suna % 360.
+
+ # get the dask array underneath
+ sata = sata.data
+ satz = satz.data
+ suna = suna.data
+ sunz = sunz.data
+
+ ssadiff = compute_relative_azimuth(sata, suna)
del sata, suna
atmosphere = self.attrs.get('atmosphere', 'us-standard')
aerosol_type = self.attrs.get('aerosol_type', 'marine_clean_aerosol')
- rayleigh_key = (vis.attrs['platform_name'],
- vis.attrs['sensor'], atmosphere, aerosol_type)
logger.info("Removing Rayleigh scattering with atmosphere '%s' and "
"aerosol type '%s' for '%s'",
atmosphere, aerosol_type, vis.attrs['name'])
- if rayleigh_key not in self._rayleigh_cache:
- corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'],
- atmosphere=atmosphere,
- aerosol_type=aerosol_type)
- self._rayleigh_cache[rayleigh_key] = corrector
- else:
- corrector = self._rayleigh_cache[rayleigh_key]
+ corrector = Rayleigh(vis.attrs['platform_name'], vis.attrs['sensor'],
+ atmosphere=atmosphere,
+ aerosol_type=aerosol_type)
try:
refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff,
@@ -116,6 +83,14 @@ def __call__(self, projectables, optional_datasets=None, **info):
return proj
+def _call_mapped_correction(satz, band_data, corrector, band_name):
+ # need to convert to masked array
+ orig_dtype = band_data.dtype
+ band_data = np.ma.masked_where(np.isnan(band_data), band_data)
+ res = corrector.get_correction(satz, band_name, band_data)
+ return res.filled(np.nan).astype(orig_dtype, copy=False)
+
+
class PSPAtmosphericalCorrection(ModifierBase):
"""Correct for atmospheric effects."""
@@ -131,26 +106,17 @@ def __call__(self, projectables, optional_datasets=None, **info):
if optional_datasets:
satz = optional_datasets[0]
else:
- from pyorbital.orbital import get_observer_look
- lons, lats = band.attrs['area'].get_lonlats(chunks=band.data.chunks)
- sat_lon, sat_lat, sat_alt = get_satpos(band)
- try:
- dummy, satel = get_observer_look(sat_lon,
- sat_lat,
- sat_alt / 1000.0, # km
- band.attrs['start_time'],
- lons, lats, 0)
- except KeyError:
- raise KeyError(
- 'Band info is missing some meta data!')
- satz = 90 - satel
- del satel
+ satz = get_satellite_zenith_angle(band)
+ satz = satz.data # get dask array underneath
logger.info('Correction for limb cooling')
corrector = AtmosphericalCorrection(band.attrs['platform_name'],
band.attrs['sensor'])
- atm_corr = corrector.get_correction(satz, band.attrs['name'], band)
+ atm_corr = da.map_blocks(_call_mapped_correction, satz, band.data,
+ corrector=corrector,
+ band_name=band.attrs['name'],
+ meta=np.array((), dtype=band.dtype))
proj = xr.DataArray(atm_corr, attrs=band.attrs,
dims=band.dims, coords=band.coords)
self.apply_modifier_info(band, proj)
diff --git a/satpy/modifiers/geometry.py b/satpy/modifiers/geometry.py
index 00b42d9843..ecd83f80e5 100644
--- a/satpy/modifiers/geometry.py
+++ b/satpy/modifiers/geometry.py
@@ -17,15 +17,15 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Modifier classes for corrections based on sun and other angles."""
+from __future__ import annotations
+
import logging
-import time
-from weakref import WeakValueDictionary
import numpy as np
-import xarray as xr
from satpy.modifiers import ModifierBase
-from satpy.utils import sunzen_corr_cos, atmospheric_path_length_correction
+from satpy.modifiers.angles import sunzen_corr_cos
+from satpy.utils import atmospheric_path_length_correction
logger = logging.getLogger(__name__)
@@ -33,8 +33,6 @@
class SunZenithCorrectorBase(ModifierBase):
"""Base class for sun zenith correction modifiers."""
- coszen = WeakValueDictionary()
-
def __init__(self, max_sza=95.0, **kwargs):
"""Collect custom configuration values.
@@ -52,38 +50,24 @@ def __call__(self, projectables, **info):
projectables = self.match_data_arrays(list(projectables) + list(info.get('optional_datasets', [])))
vis = projectables[0]
if vis.attrs.get("sunz_corrected"):
- logger.debug("Sun zen correction already applied")
+ logger.debug("Sun zenith correction already applied")
return vis
- area_name = hash(vis.attrs['area'])
- key = (vis.attrs["start_time"], area_name)
- tic = time.time()
logger.debug("Applying sun zen correction")
- coszen = self.coszen.get(key)
- if coszen is None and not info.get('optional_datasets'):
- # we were not given SZA, generate SZA then calculate cos(SZA)
- from pyorbital.astronomy import cos_zen
+ if not info.get('optional_datasets'):
+ # we were not given SZA, generate cos(SZA)
logger.debug("Computing sun zenith angles.")
- lons, lats = vis.attrs["area"].get_lonlats(chunks=vis.data.chunks)
-
- coords = {}
- if 'y' in vis.coords and 'x' in vis.coords:
- coords['y'] = vis['y']
- coords['x'] = vis['x']
- coszen = xr.DataArray(cos_zen(vis.attrs["start_time"], lons, lats),
- dims=['y', 'x'], coords=coords)
+ from .angles import get_cos_sza
+ coszen = get_cos_sza(vis)
if self.max_sza is not None:
coszen = coszen.where(coszen >= self.max_sza_cos)
- self.coszen[key] = coszen
- elif coszen is None:
+ else:
# we were given the SZA, calculate the cos(SZA)
coszen = np.cos(np.deg2rad(projectables[1]))
- self.coszen[key] = coszen
proj = self._apply_correction(vis, coszen)
proj.attrs = vis.attrs.copy()
self.apply_modifier_info(vis, proj)
- logger.debug("Sun-zenith correction applied. Computation time: %5.1f (sec)", time.time() - tic)
return proj
def _apply_correction(self, proj, coszen):
@@ -105,7 +89,7 @@ class SunZenithCorrector(SunZenithCorrectorBase):
.. code-block:: yaml
sunz_corrected:
- compositor: !!python/name:satpy.composites.SunZenithCorrector
+ modifier: !!python/name:satpy.modifiers.SunZenithCorrector
max_sza: !!null
optional_prerequisites:
- solar_zenith_angle
@@ -128,7 +112,9 @@ def __init__(self, correction_limit=88., **kwargs):
def _apply_correction(self, proj, coszen):
logger.debug("Apply the standard sun-zenith correction [1/cos(sunz)]")
- return sunzen_corr_cos(proj, coszen, limit=self.correction_limit, max_sza=self.max_sza)
+ res = proj.copy()
+ res.data = sunzen_corr_cos(proj.data, coszen.data, limit=self.correction_limit, max_sza=self.max_sza)
+ return res
class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase):
@@ -149,7 +135,7 @@ class EffectiveSolarPathLengthCorrector(SunZenithCorrectorBase):
.. code-block:: yaml
effective_solar_pathlength_corrected:
- compositor: !!python/name:satpy.composites.EffectiveSolarPathLengthCorrector
+ modifier: !!python/name:satpy.modifiers.EffectiveSolarPathLengthCorrector
max_sza: !!null
optional_prerequisites:
- solar_zenith_angle
diff --git a/satpy/modifiers/parallax.py b/satpy/modifiers/parallax.py
new file mode 100644
index 0000000000..9b399be41d
--- /dev/null
+++ b/satpy/modifiers/parallax.py
@@ -0,0 +1,551 @@
+# Copyright (c) 2021-2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Parallax correction.
+
+Routines related to parallax correction using datasets involving height, such
+as cloud top height.
+
+The geolocation of (geostationary) satellite imagery is calculated by
+agencies or in satpy readers with the assumption of a clear view from
+the satellite to the geoid. When a cloud blocks the view of the Earth
+surface or the surface is above sea level, the geolocation is not accurate
+for the cloud or mountain top. This module contains routines to correct
+imagery such that pixels are shifted or interpolated to correct for this
+parallax effect.
+
+Parallax correction is currently only supported for (cloud top) height
+that arrives on an :class:`~pyresample.geometry.AreaDefinition`, such
+as is standard for geostationary satellites. Parallax correction with
+data described by a :class:`~pyresample.geometry.SwathDefinition`,
+such as is common for polar satellites, is not (yet) supported.
+
+See also the :doc:`../modifiers` page in the documentation for an introduction to
+parallax correction as a modifier in Satpy.
+"""
+
+import datetime
+import inspect
+import logging
+import warnings
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+from pyorbital.orbital import A as EARTH_RADIUS
+from pyorbital.orbital import get_observer_look
+from pyproj import Geod
+from pyresample.bucket import BucketResampler
+from pyresample.geometry import SwathDefinition
+
+from satpy.modifiers import ModifierBase
+from satpy.resample import resample_dataset
+from satpy.utils import get_satpos, lonlat2xyz, xyz2lonlat
+
+logger = logging.getLogger(__name__)
+
+
+class MissingHeightError(ValueError):
+ """Raised when heights do not overlap with area to be corrected."""
+
+
+class IncompleteHeightWarning(UserWarning):
+ """Raised when heights only partially overlap with area to be corrected."""
+
+
+def get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height):
+ """Calculate parallax corrected lon/lats.
+
+ Satellite geolocation generally assumes an unobstructed view of a smooth
+ Earth surface. In reality, this view may be obstructed by clouds or
+ mountains.
+
+ If the view of a pixel at location (lat, lon) is blocked by a cloud
+ at height h, this function calculates the (lat, lon) coordinates
+ of the cloud above/in front of the invisible surface.
+
+ For scenes that are only partly cloudy, the user might set the cloud top
+ height for clear-sky pixels to NaN. This function will return a corrected
+ lat/lon as NaN as well. The user can use the original lat/lon for those
+ pixels or use the higher level :class:`ParallaxCorrection` class.
+
+ This function assumes a spherical Earth.
+
+ .. note::
+
+ Be careful with units! This code expects ``sat_alt`` and
+ ``height`` to be in meter above the Earth's surface. You may
+ have to convert your input correspondingly. Cloud Top Height
+ is usually reported in meters above the Earth's surface, rarely
+ in km. Satellite altitude may be reported in either m or km, but
+ orbital parameters are usually in relation to the Earth's centre.
+ The Earth radius from pyresample is reported in km.
+
+ Args:
+ sat_lon (number): Satellite longitude in geodetic coordinates [degrees]
+ sat_lat (number): Satellite latitude in geodetic coordinates [degrees]
+ sat_alt (number): Satellite altitude above the Earth surface [m]
+ lon (array or number): Longitudes of pixel or pixels to be corrected,
+ in geodetic coordinates [degrees]
+ lat (array or number): Latitudes of pixel/pixels to be corrected, in
+ geodetic coordinates [degrees]
+ height (array or number): Heights of pixels on which the correction
+ will be based. Typically this is the cloud top height. [m]
+
+ Returns:
+ tuple[float, float]: Corrected geolocation
+ Corrected geolocation ``(lon, lat)`` in geodetic coordinates for
+ the pixel(s) to be corrected. [degrees]
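+
+    Example, with illustrative values for a geostationary satellite at 0°E
+    viewing an 8 km high cloud over central Europe::
+
+        corr_lon, corr_lat = get_parallax_corrected_lonlats(
+            0.0, 0.0, 35_786_000.0,  # satellite lon [deg], lat [deg], alt [m]
+            10.0, 50.0, 8000.0)      # pixel lon [deg], lat [deg], height [m]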
+ """
+ elevation = _get_satellite_elevation(sat_lon, sat_lat, sat_alt, lon, lat)
+ parallax_distance = _calculate_slant_cloud_distance(height, elevation)
+ shifted_xyz = _get_parallax_shift_xyz(
+ sat_lon, sat_lat, sat_alt, lon, lat, parallax_distance)
+
+ return xyz2lonlat(
+ shifted_xyz[..., 0], shifted_xyz[..., 1], shifted_xyz[..., 2])
+
+
+def get_surface_parallax_displacement(
+ sat_lon, sat_lat, sat_alt, lon, lat, height):
+ """Calculate surface parallax displacement.
+
+ Calculate the displacement due to parallax error. Input parameters are
+ identical to :func:`get_parallax_corrected_lonlats`.
+
+ Returns:
+        number or array: parallax displacement in meters
+ """
+ (corr_lon, corr_lat) = get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height)
+ # Get parallax displacement
+ geod = Geod(ellps="sphere")
+ _, _, parallax_dist = geod.inv(corr_lon, corr_lat, lon, lat)
+ return parallax_dist
+
+
+def _get_parallax_shift_xyz(sat_lon, sat_lat, sat_alt, lon, lat, parallax_distance):
+ """Calculate the parallax shift in cartesian coordinates.
+
+ From satellite position and cloud position, get the parallax shift in
+    cartesian coordinates.
+
+ Args:
+ sat_lon (number): Satellite longitude in geodetic coordinates [degrees]
+ sat_lat (number): Satellite latitude in geodetic coordinates [degrees]
+ sat_alt (number): Satellite altitude above the Earth surface [m]
+ lon (array or number): Longitudes of pixel or pixels to be corrected,
+ in geodetic coordinates [degrees]
+ lat (array or number): Latitudes of pixel/pixels to be corrected, in
+ geodetic coordinates [degrees]
+ parallax_distance (array or number): Cloud to ground distance with parallax
+ effect [m].
+
+ Returns:
+        Parallax shift in cartesian coordinates, in meters.
+ """
+ sat_xyz = np.hstack(lonlat2xyz(sat_lon, sat_lat)) * sat_alt
+ cth_xyz = np.stack(lonlat2xyz(lon, lat), axis=-1) * EARTH_RADIUS*1e3 # km → m
+ delta_xyz = cth_xyz - sat_xyz
+ sat_distance = np.sqrt((delta_xyz*delta_xyz).sum(axis=-1))
+ dist_shape = delta_xyz.shape[:-1] + (1,) # force correct array broadcasting
+ return cth_xyz - delta_xyz*(parallax_distance/sat_distance).reshape(dist_shape)
+
+
+def _get_satellite_elevation(sat_lon, sat_lat, sat_alt, lon, lat):
+ """Get satellite elevation.
+
+ Get the satellite elevation from satellite lon/lat/alt for positions
+ lon/lat.
+ """
+ placeholder_date = datetime.datetime(2000, 1, 1) # no impact on get_observer_look?
+ (_, elevation) = get_observer_look(
+ sat_lon, sat_lat, sat_alt/1e3, # m → km (wanted by get_observer_look)
+ placeholder_date, lon, lat, 0)
+ return elevation
+
+
+def _calculate_slant_cloud_distance(height, elevation):
+ """Calculate slant cloud to ground distance.
+
+ From (cloud top) height and satellite elevation, calculate the
+ slant cloud-to-ground distance along the line of sight of the satellite.
+ """
+ if np.isscalar(elevation) and elevation == 0:
+ raise NotImplementedError(
+ "Parallax correction not implemented for "
+ "satellite elevation 0")
+ if np.isscalar(elevation) and elevation < 0:
+ raise ValueError(
+ "Satellite is below the horizon. Cannot calculate parallax "
+ "correction.")
+ return height / np.sin(np.deg2rad(elevation))
+
+
+class ParallaxCorrection:
+ """Parallax correction calculations.
+
+ This class contains higher-level functionality to wrap the parallax
+ correction calculations in :func:`get_parallax_corrected_lonlats`. The class is
+ initialised using a base area, which is the area for which a corrected
+ geolocation will be calculated. The resulting object is a callable.
+ Calling the object with an array of (cloud top) heights returns a
+    :class:`~pyresample.geometry.SwathDefinition` describing the new,
+ corrected geolocation. The cloud top height should cover at least the
+ area for which the corrected geolocation will be calculated.
+
+    Note that the ``ctth`` dataset must contain satellite location
+    metadata, such as the ``orbital_parameters`` dataset attribute
+    set by many Satpy readers.
+ corrected are coming from the same platform as the provided cloud top
+ height.
+
+ A note on the algorithm and the implementation. Parallax correction
+ is inherently an inverse problem. The reported geolocation in
+ satellite data files is the true location plus the parallax error.
+ Therefore, this class first calculates the true geolocation (using
+ :func:`get_parallax_corrected_lonlats`), which gives a shifted longitude and
+ shifted latitude on an irregular grid. The difference between
+ the original and the shifted grid is the parallax error or shift.
+ The magnitude of this error can be estimated with
+ :func:`get_surface_parallax_displacement`.
+ With this difference, we need to invert the parallax correction to
+ calculate the corrected geolocation. Due to parallax correction,
+ high clouds shift a lot, low clouds shift a little, and cloud-free
+    pixels do not shift at all. The shift may map zero, one,
+    two, or more source pixels onto a single destination pixel. Physically,
+ this corresponds to the situation where a narrow but high cloud is
+ viewed at a large angle. The cloud may occupy two or more pixels when
+ viewed at a large angle, but only one when viewed straight from above.
+ To accurately reproduce this perspective, the parallax correction uses
+ the :class:`~pyresample.bucket.BucketResampler` class, specifically
+ the :meth:`~pyresample.bucket.BucketResampler.get_abs_max` method, to
+ retain only the largest absolute shift (corresponding to the highest
+ cloud) within each pixel. Any other resampling method at this step
+ would yield incorrect results. When cloud moves over clear-sky, the
+ clear-sky pixel is unshifted and the shift is located exactly in the
+ centre of the grid box, so nearest-neighbour resampling would lead to
+ such shifts being deselected. Other resampling methods would average
+ large shifts with small shifts, leading to unpredictable results.
+ Now the reprojected shifts can be applied to the original lat/lon,
+ returning a new :class:`~pyresample.geometry.SwathDefinition`.
+    This is the object returned by :meth:`corrected_area`.
+
+ This procedure can be configured as a modifier using the
+ :class:`ParallaxCorrectionModifier` class. However, the modifier can only
+    be applied to one dataset at a time, which may not provide optimal
+ performance, although dask should reuse identical calculations between
+ multiple channels.
+
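+    A sketch of direct use, assuming a Scene ``sc`` that contains a cloud top
+    height dataset (``"ctth_alti"`` from the ``nwcsaf-geo`` reader), a channel
+    to be corrected, and ``numpy`` imported as ``np``::
+
+        from satpy.resample import resample_dataset
+        corrector = ParallaxCorrection(sc["VIS006"].attrs["area"])
+        corrected_area = corrector(sc["ctth_alti"])
+        corrected_vis = resample_dataset(
+            sc["VIS006"], corrected_area,
+            radius_of_influence=50_000, fill_value=np.nan)
+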
+ """
+
+ def __init__(self, base_area,
+ debug_mode=False):
+ """Initialise parallax correction class.
+
+ Args:
+            base_area (:class:`~pyresample.AreaDefinition`): Area for which
+                the corrected geolocation will be calculated.
+ debug_mode (bool): Store diagnostic information in
+                self.diagnostics. This attribute always applies to the most
+ recently applied operation only.
+ """
+ self.base_area = base_area
+ self.debug_mode = debug_mode
+ self.diagnostics = {}
+
+ def __call__(self, cth_dataset, **kwargs):
+ """Apply parallax correction to dataset.
+
+ Args:
+ cth_dataset: Dataset containing cloud top heights (or other heights
+ to be corrected).
+
+ Returns:
+            :class:`~pyresample.geometry.SwathDefinition`: SwathDefinition
+                with corrected lat/lon coordinates.
+ """
+ self.diagnostics.clear()
+ return self.corrected_area(cth_dataset, **kwargs)
+
+ def corrected_area(self, cth_dataset,
+ cth_resampler="nearest",
+ cth_radius_of_influence=50000,
+ lonlat_chunks=1024):
+ """Return the parallax corrected SwathDefinition.
+
+ Using the cloud top heights provided in ``cth_dataset``, calculate the
+ :class:`pyresample.geometry.SwathDefinition` that estimates the
+ geolocation for each pixel if it had been viewed from straight above
+ (without parallax error). The cloud top height will first be resampled
+ onto the area passed upon class initialisation in :meth:`__init__`.
+ Pixels that are invisible after parallax correction are not retained
+ but get geolocation NaN.
+
+ Args:
+ cth_dataset (:class:`~xarray.DataArray`): Cloud top height in
+ meters. The variable attributes must contain an ``area``
+ attribute describing the geolocation in a pyresample-aware way,
+ and they must contain satellite orbital parameters. The
+ dimensions must be ``(y, x)``. For best performance, this
+ should be a dask-based :class:`~xarray.DataArray`.
+ cth_resampler (string, optional): Resampler to use when resampling the
+ (cloud top) height to the base area. Defaults to "nearest".
+ cth_radius_of_influence (number, optional): Radius of influence to use when
+ resampling the (cloud top) height to the base area. Defaults
+ to 50000.
+ lonlat_chunks (int, optional): Chunking to use when calculating lon/lats.
+ Probably the default (1024) should be fine.
+
+ Returns:
+ :class:`~pyresample.geometry.SwathDefinition` describing parallax
+ corrected geolocation.
+ """
+ logger.debug("Calculating parallax correction using heights from "
+ f"{cth_dataset.attrs.get('name', cth_dataset.name)!s}, "
+ f"with base area {self.base_area.name!s}.")
+ (sat_lon, sat_lat, sat_alt_m) = _get_satpos_from_cth(cth_dataset)
+ self._check_overlap(cth_dataset)
+
+ cth_dataset = self._prepare_cth_dataset(
+ cth_dataset, resampler=cth_resampler,
+ radius_of_influence=cth_radius_of_influence,
+ lonlat_chunks=lonlat_chunks)
+
+ (base_lon, base_lat) = self.base_area.get_lonlats(chunks=lonlat_chunks)
+ # calculate the shift/error due to the parallax effect
+ (corrected_lon, corrected_lat) = get_parallax_corrected_lonlats(
+ sat_lon, sat_lat, sat_alt_m,
+ base_lon, base_lat, cth_dataset.data)
+
+ shifted_area = self._get_swathdef_from_lon_lat(corrected_lon, corrected_lat)
+
+ # But we are not actually moving pixels, rather we want a
+ # coordinate transformation. With this transformation we approximately
+ # invert the pixel coordinate transformation, giving the lon and lat
+ # where we should retrieve a value for a given pixel.
+ (proj_lon, proj_lat) = self._get_corrected_lon_lat(
+ base_lon, base_lat, shifted_area)
+
+ return self._get_swathdef_from_lon_lat(proj_lon, proj_lat)
+
+ @staticmethod
+ def _get_swathdef_from_lon_lat(lon, lat):
+ """Return a SwathDefinition from lon/lat.
+
+ Turn ndarrays describing lon/lat into xarray with dimensions y, x, then
+ use these to create a :class:`~pyresample.geometry.SwathDefinition`.
+ """
+ # lons and lats passed to SwathDefinition must be data-arrays with
+ # dimensions, see https://github.com/pytroll/satpy/issues/1434
+ # and https://github.com/pytroll/satpy/issues/1997
+ return SwathDefinition(
+ xr.DataArray(lon, dims=("y", "x")),
+ xr.DataArray(lat, dims=("y", "x")))
+
+ def _prepare_cth_dataset(
+ self, cth_dataset, resampler="nearest", radius_of_influence=50000,
+ lonlat_chunks=1024):
+ """Prepare CTH dataset.
+
+ Set cloud top height to zero wherever lat/lon are valid but CTH is
+ undefined. Then resample onto the base area.
+ """
+ # for calculating the parallax effect, set cth to 0 where it is
+ # undefined, unless pixels have no valid lat/lon
+ # NB: 0 may be below the surface... could be a problem for high
+ # resolution imagery in mountainous or high elevation terrain
+ # NB: how tolerant of xarray & dask is this?
+ resampled_cth_dataset = resample_dataset(
+ cth_dataset, self.base_area, resampler=resampler,
+ radius_of_influence=radius_of_influence)
+ (pixel_lon, pixel_lat) = resampled_cth_dataset.attrs["area"].get_lonlats(
+ chunks=lonlat_chunks)
+ masked_resampled_cth_dataset = resampled_cth_dataset.where(
+ np.isfinite(pixel_lon) & np.isfinite(pixel_lat))
+ masked_resampled_cth_dataset = masked_resampled_cth_dataset.where(
+ masked_resampled_cth_dataset.notnull(), 0)
+ return masked_resampled_cth_dataset
+
+ def _check_overlap(self, cth_dataset):
+ """Ensure cth_dataset is usable for parallax correction.
+
+ Checks the coverage of ``cth_dataset`` compared to the ``base_area``. If
+ the entirety of ``base_area`` is covered by ``cth_dataset``, do
+ nothing. If only part of ``base_area`` is covered by ``cth_dataset``,
+        issue an `IncompleteHeightWarning`. If none of ``base_area`` is covered
+ by ``cth_dataset``, raise a `MissingHeightError`.
+ """
+ warnings.warn(
+ "Overlap checking not impelemented. Waiting for "
+ "fix for https://github.com/pytroll/pyresample/issues/329")
+
+ def _get_corrected_lon_lat(self, base_lon, base_lat, shifted_area):
+ """Calculate the corrected lon/lat based from the shifted area.
+
+ After calculating the shifted area based on
+ :func:`get_parallax_corrected_lonlats`,
+ we invert the parallax error and estimate where those pixels came from.
+ For details on the algorithm, see the class docstring.
+ """
+ (corrected_lon, corrected_lat) = shifted_area.get_lonlats(chunks=1024)
+ lon_diff = corrected_lon - base_lon
+ lat_diff = corrected_lat - base_lat
+ # We use the bucket resampler here, because parallax correction
+ # inevitably means there will be 2 source pixels ending up in the same
+ # destination pixel. We want to choose the biggest shift (max abs in
+ # lat_diff and lon_diff), because the biggest shift corresponds to the
+ # highest clouds, and if we move a 10 km cloud over a 2 km one, we
+ # should retain the 10 km.
+ #
+ # some things to keep in mind:
+ # - even with a constant cloud height, 3 source pixels may end up in
+ # the same destination pixel, because pixels get larger in the
+ # direction of the satellite. This means clouds may shrink as they
+ # approach the satellite.
+ # - the x-shift is a function of y and the y-shift is a function of x,
+ # so a cloud that was rectangular at the start may no longer be
+ # rectangular at the end
+ bur = BucketResampler(self.base_area,
+ da.array(corrected_lon), da.array(corrected_lat))
+ inv_lat_diff = bur.get_abs_max(lat_diff)
+ inv_lon_diff = bur.get_abs_max(lon_diff)
+
+ inv_lon = base_lon - inv_lon_diff
+ inv_lat = base_lat - inv_lat_diff
+ if self.debug_mode:
+ self.diagnostics["corrected_lon"] = corrected_lon
+ self.diagnostics["corrected_lat"] = corrected_lat
+ self.diagnostics["inv_lon"] = inv_lon
+ self.diagnostics["inv_lat"] = inv_lat
+ self.diagnostics["inv_lon_diff"] = inv_lon_diff
+ self.diagnostics["inv_lat_diff"] = inv_lat_diff
+ self.diagnostics["base_lon"] = base_lon
+ self.diagnostics["base_lat"] = base_lat
+ self.diagnostics["lon_diff"] = lon_diff
+ self.diagnostics["lat_diff"] = lat_diff
+ self.diagnostics["shifted_area"] = shifted_area
+ self.diagnostics["count"] = xr.DataArray(
+ bur.get_count(), dims=("y", "x"), attrs={"area": self.base_area})
+ return (inv_lon, inv_lat)
+
+
+class ParallaxCorrectionModifier(ModifierBase):
+ """Modifier for parallax correction.
+
+ Apply parallax correction as a modifier. Uses the
+ :class:`ParallaxCorrection` class, which in turn uses the
+ :func:`get_parallax_corrected_lonlats` function. See the documentation there for
+ details on the behaviour.
+
+ To use this, add to ``composites/visir.yaml`` within ``SATPY_CONFIG_PATH``
+ something like::
+
+ sensor_name: visir
+
+ modifiers:
+ parallax_corrected:
+ modifier: !!python/name:satpy.modifiers.parallax.ParallaxCorrectionModifier
+ prerequisites:
+ - "ctth_alti"
+ dataset_radius_of_influence: 50000
+
+ composites:
+
+ parallax_corrected_VIS006:
+ compositor: !!python/name:satpy.composites.SingleBandCompositor
+ prerequisites:
+ - name: VIS006
+ modifiers: [parallax_corrected]
+
+ Here, ``ctth_alti`` is CTH provided by the ``nwcsaf-geo`` reader, so to use it
+ one would have to pass both on scene creation::
+
+ sc = Scene({"seviri_l1b_hrit": files_l1b, "nwcsaf-geo": files_l2})
+ sc.load(["parallax_corrected_VIS006"])
+
+    The modifier takes the following parameters, all of which are optional.
+ They affect various steps in the algorithm. Setting them may impact
+ performance:
+
+ cth_resampler
+ Resampler to use when resampling (cloud top) height to the base area.
+ Defaults to "nearest".
+ cth_radius_of_influence
+ Radius of influence to use when resampling the (cloud top) height to
+ the base area. Defaults to 50000.
+ lonlat_chunks
+ Chunk size to use when obtaining longitudes and latitudes from the area
+        definition. Defaults to 1024. If you set this to None, the lon/lats
+        are calculated eagerly rather than lazily via dask. Changing this
+        value may make parallax correction either slower or faster.
+ dataset_radius_of_influence
+ Radius of influence to use when resampling the dataset onto the
+ swathdefinition describing the parallax-corrected area. Defaults to
+ 50000. This always uses nearest neighbour resampling.
+
+ Alternately, you can use the lower-level API directly with the
+ :class:`ParallaxCorrection` class, which may be more efficient if multiple
+    datasets need to be corrected. RGB composites cannot be modified in this way
+ (i.e. you can't replace "VIS006" by "natural_color"). To get a parallax
+ corrected RGB composite, create a new composite where each input has the
+    modifier applied. The parallax calculation should only occur once, because
+    the calculations happen via dask and dask should reuse the computation.
+ """
+
+ def __call__(self, projectables, optional_datasets=None, **info):
+ """Apply parallax correction.
+
+ The argument ``projectables`` needs to contain the dataset to be
+ projected and the height to use for the correction.
+ """
+ (to_be_corrected, cth) = projectables
+ base_area = to_be_corrected.attrs["area"]
+ corrector = self._get_corrector(base_area)
+ plax_corr_area = corrector(
+ cth,
+ cth_resampler=self.attrs.get("cth_resampler", "nearest"),
+ cth_radius_of_influence=self.attrs.get("cth_radius_of_influence", 50_000),
+ lonlat_chunks=self.attrs.get("lonlat_chunks", 1024),
+ )
+ res = resample_dataset(
+ to_be_corrected, plax_corr_area,
+ radius_of_influence=self.attrs.get("dataset_radius_of_influence", 50_000),
+ fill_value=np.nan)
+ res.attrs["area"] = to_be_corrected.attrs["area"]
+ self.apply_modifier_info(to_be_corrected, res)
+
+ return res
+
+ def _get_corrector(self, base_area):
+ # only pass on those attributes that are arguments by
+ # ParallaxCorrection.__init__
+ sig = inspect.signature(ParallaxCorrection.__init__)
+ kwargs = {}
+ for k in sig.parameters.keys() & self.attrs.keys():
+ kwargs[k] = self.attrs[k]
+ corrector = ParallaxCorrection(base_area, **kwargs)
+ return corrector
+
+
+def _get_satpos_from_cth(cth_dataset):
+ """Obtain satellite position from CTH dataset, height in meter.
+
+ From a CTH dataset, obtain the satellite position lon, lat, altitude/m,
+ either directly from orbital parameters, or, when missing, from the
+ platform name using pyorbital and skyfield.
+ """
+ (sat_lon, sat_lat, sat_alt_km) = get_satpos(
+ cth_dataset, use_tle=True)
+ return (sat_lon, sat_lat, sat_alt_km * 1000)
diff --git a/satpy/multiscene.py b/satpy/multiscene.py
index f4a8ff1f20..2dee0ce146 100644
--- a/satpy/multiscene.py
+++ b/satpy/multiscene.py
@@ -19,6 +19,7 @@
import copy
import logging
+import warnings
from queue import Queue
from threading import Thread
@@ -68,29 +69,73 @@ def timeseries(datasets):
return res
-def add_group_aliases(scenes, groups):
- """Add aliases for the groups datasets belong to."""
+def group_datasets_in_scenes(scenes, groups):
+ """Group different datasets in multiple scenes by adding aliases.
+
+ Args:
+ scenes (iterable): Scenes to be processed.
+ groups (dict): Groups of datasets that shall be treated equally by
+ MultiScene. Keys specify the groups, values specify the dataset
+ names to be grouped. For example::
+
+ from satpy import DataQuery
+ groups = {DataQuery(name='odd'): ['ds1', 'ds3'],
+ DataQuery(name='even'): ['ds2', 'ds4']}
+
+ """
for scene in scenes:
- scene = scene.copy()
- for group_id, member_names in groups.items():
- # Find out whether one of the datasets in this scene belongs
- # to this group
- member_ids = [scene[name].attrs['_satpy_id']
- for name in member_names if name in scene]
-
- # Add an alias for the group it belongs to
- if len(member_ids) == 1:
- member_id = member_ids[0]
- new_ds = scene[member_id].copy()
- new_ds.attrs.update(group_id.to_dict())
- scene[group_id] = new_ds
- elif len(member_ids) > 1:
- raise ValueError('Cannot add multiple datasets from the same '
- 'scene to a group')
- else:
- # Datasets in this scene don't belong to any group
- pass
- yield scene
+ grp = GroupAliasGenerator(scene, groups)
+ yield grp.duplicate_datasets_with_group_alias()
+
+
+class GroupAliasGenerator:
+ """Add group aliases to a scene."""
+
+ def __init__(self, scene, groups):
+ """Initialize the alias generator."""
+ self.scene = scene.copy()
+ self.groups = groups
+
+ def duplicate_datasets_with_group_alias(self):
+ """Duplicate datasets to be grouped with a group alias."""
+ for group_id, group_members in self.groups.items():
+ self._duplicate_dataset_with_group_alias(group_id, group_members)
+ return self.scene
+
+ def _duplicate_dataset_with_group_alias(self, group_id, group_members):
+ member_ids = self._get_dataset_id_of_group_members_in_scene(group_members)
+ if len(member_ids) == 1:
+ self._duplicate_dataset_with_different_id(
+ dataset_id=member_ids[0],
+ alias_id=group_id,
+ )
+ elif len(member_ids) > 1:
+ raise ValueError('Cannot add multiple datasets from a scene '
+ 'to the same group')
+
+ def _get_dataset_id_of_group_members_in_scene(self, group_members):
+ return [
+ self.scene[member].attrs['_satpy_id']
+ for member in group_members if member in self.scene
+ ]
+
+ def _duplicate_dataset_with_different_id(self, dataset_id, alias_id):
+ dataset = self.scene[dataset_id].copy()
+ self._prepare_dataset_for_duplication(dataset, alias_id)
+ self.scene[alias_id] = dataset
+
+ def _prepare_dataset_for_duplication(self, dataset, alias_id):
+ # Drop all identifier attributes from the original dataset. Otherwise
+ # they might invalidate the dataset ID of the alias.
+ self._drop_id_attrs(dataset)
+ dataset.attrs.update(alias_id.to_dict())
+
+ def _drop_id_attrs(self, dataset):
+ for drop_key in self._get_id_attrs(dataset):
+ dataset.attrs.pop(drop_key)
+
+ def _get_id_attrs(self, dataset):
+ return dataset.attrs["_satpy_id"].to_dict().keys()
class _SceneGenerator(object):
@@ -196,6 +241,10 @@ def from_files(cls, files_to_sort, reader=None,
scene_kwargs = {}
file_groups = group_files(files_to_sort, reader=reader, **kwargs)
if ensure_all_readers:
+ warnings.warn(
+ "Argument ensure_all_readers is deprecated. Use "
+ "missing='skip' instead.",
+ DeprecationWarning)
file_groups = [fg for fg in file_groups if all(fg.values())]
scenes = (Scene(filenames=fg, **scene_kwargs) for fg in file_groups)
return cls(scenes)
@@ -293,6 +342,18 @@ def resample(self, destination=None, **kwargs):
def blend(self, blend_function=stack):
"""Blend the datasets into one scene.
+ Reduce the :class:`MultiScene` to a single :class:`~satpy.scene.Scene`. Datasets
+ occurring in each scene will be passed to a blending
+ function, which shall take as input a list of datasets
+ (:class:`xarray.DataArray` objects) and shall return a single
+ dataset (:class:`xarray.DataArray` object). The blend method
+ then assigns those datasets to the blended scene.
+
+ Blending functions provided in this module are :func:`stack`
+ (the default) and :func:`timeseries`, but the Python built-in
+ function :func:`sum` also works and may be appropriate for
+ some types of data.
+
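+        For example, to concatenate the shared datasets along a new time
+        dimension (``mscn`` being a :class:`MultiScene` instance)::
+
+            from satpy.multiscene import timeseries
+            blended_scene = mscn.blend(blend_function=timeseries)
+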
.. note::
Blending is not currently optimized for generator-based
@@ -321,7 +382,7 @@ def group(self, groups):
DataQuery('my_group', wavelength=(10, 11, 12)): ['IR_108', 'B13', 'C13']
}
"""
- self._scenes = add_group_aliases(self._scenes, groups)
+ self._scenes = group_datasets_in_scenes(self._scenes, groups)
def _distribute_save_datasets(self, scenes_iter, client, batch_size=1, **kwargs):
"""Distribute save_datasets across a cluster."""
diff --git a/satpy/plugin_base.py b/satpy/plugin_base.py
index 0e55a8bcb1..ee19341796 100644
--- a/satpy/plugin_base.py
+++ b/satpy/plugin_base.py
@@ -20,11 +20,7 @@
import logging
import yaml
-
-try:
- from yaml import UnsafeLoader
-except ImportError:
- from yaml import Loader as UnsafeLoader
+from yaml import UnsafeLoader
from satpy._config import config_search_paths
from satpy.utils import recursive_dict_update
@@ -44,7 +40,7 @@ def __init__(self, default_config_filename=None, config_files=None, **kwargs):
default_config_filename (str): Configuration filename to use if
no other files have been specified with `config_files`.
config_files (list or str): Configuration files to load instead
- of those automatically found in `ppp_config_dir` and other
+ of those automatically found in `SATPY_CONFIG_PATH` and other
default configuration locations.
kwargs (dict): Unused keyword arguments.
diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py
index 7d5b46ccde..a9d8661c8b 100644
--- a/satpy/readers/__init__.py
+++ b/satpy/readers/__init__.py
@@ -16,35 +16,34 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Shared objects of the various reader classes."""
+from __future__ import annotations
import logging
import os
+import pickle # nosec B403
import warnings
from datetime import datetime, timedelta
from functools import total_ordering
-import pickle
import yaml
+from yaml import UnsafeLoader
-try:
- from yaml import UnsafeLoader
-except ImportError:
- from yaml import Loader as UnsafeLoader
+from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config
-from satpy._config import config_search_paths, glob_config
-from .yaml_reader import (AbstractYAMLReader,
- load_yaml_configs as load_yaml_reader_configs)
+from .yaml_reader import AbstractYAMLReader
+from .yaml_reader import load_yaml_configs as load_yaml_reader_configs
LOG = logging.getLogger(__name__)
# Old Name -> New Name
-OLD_READER_NAMES = {
-}
+PENDING_OLD_READER_NAMES = {'fci_l1c_fdhsi': 'fci_l1c_nc'}
+OLD_READER_NAMES: dict[str, str] = {}
def group_files(files_to_sort, reader=None, time_threshold=10,
- group_keys=None, reader_kwargs=None):
+ group_keys=None, reader_kwargs=None,
+ missing="pass"):
"""Group series of files by file pattern information.
By default this will group files by their filename ``start_time``
@@ -52,8 +51,6 @@ def group_files(files_to_sort, reader=None, time_threshold=10,
dictionaries returned by this function to the Scene classes'
``filenames``, a series `Scene` objects can be easily created.
- .. versionadded:: 0.12
-
Args:
files_to_sort (iterable): File paths to sort in to group
reader (str or Collection[str]): Reader or readers whose file patterns
@@ -76,6 +73,16 @@ def group_files(files_to_sort, reader=None, time_threshold=10,
behaviour without doing so is undefined.
reader_kwargs (dict): Additional keyword arguments to pass to reader
creation.
+ missing (str): Parameter to control the behavior in the scenario where
+ multiple readers were passed, but at least one group does not have
+ files associated with every reader. Valid values are ``"pass"``
+ (the default), ``"skip"``, and ``"raise"``. If set to ``"pass"``,
+ groups are passed as-is. Some groups may have zero files for some
+ readers. If set to ``"skip"``, groups for which one or more
+ readers have zero files are skipped (meaning that some files may
+ not be associated to any group). If set to ``"raise"``, raise a
+ `FileNotFoundError` in case there are any groups for which one or
+ more readers have no files associated.
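+
+    Example, keeping only groups that have files for both readers (reader
+    names as used elsewhere in Satpy)::
+
+        groups = group_files(files, reader=["seviri_l1b_hrit", "nwcsaf-geo"],
+                             missing="skip")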
Returns:
List of dictionaries mapping 'reader' to a list of filenames.
@@ -99,7 +106,9 @@ def group_files(files_to_sort, reader=None, time_threshold=10,
file_groups = _get_sorted_file_groups(file_keys, time_threshold)
- return [{rn: file_groups[group_key].get(rn, []) for rn in reader} for group_key in file_groups]
+ groups = [{rn: file_groups[group_key].get(rn, []) for rn in reader} for group_key in file_groups]
+
+ return list(_filter_groups(groups, missing=missing))
def _assign_files_to_readers(files_to_sort, reader_names,
@@ -231,6 +240,62 @@ def _get_sorted_file_groups(all_file_keys, time_threshold):
return file_groups
+def _filter_groups(groups, missing="pass"):
+ """Filter multi-reader group-files behavior.
+
+ Helper for `group_files`. When `group_files` is called with multiple
+ readers, make sure that the desired behaviour for missing files is
+ enforced: if missing is ``"raise"``, raise an exception if at least one
+ group has at least one reader without files; if it is ``"skip"``, remove
+ those. If it is ``"pass"``, do nothing. Yields groups to be kept.
+
+ Args:
+ groups (List[Mapping[str, List[str]]]):
+ groups as found by `group_files`.
+ missing (str):
+ String controlling behaviour, see documentation above.
+
+ Yields:
+ ``Mapping[str, List[str]]``: groups to be retained
+ """
+ if missing == "pass":
+ yield from groups
+ return
+ if missing not in ("raise", "skip"):
+ raise ValueError("Invalid value for ``missing`` argument. Expected "
+ f"'raise', 'skip', or 'pass', got '{missing!s}'")
+ for (i, grp) in enumerate(groups):
+ readers_without_files = _get_keys_with_empty_values(grp)
+ if readers_without_files:
+ if missing == "raise":
+ raise FileNotFoundError(
+ f"when grouping files, group at index {i:d} "
+ "had no files for readers: " +
+ ", ".join(readers_without_files))
+ else:
+ yield grp
+
+
+def _get_keys_with_empty_values(grp):
+ """Find mapping keys where values have length zero.
+
+ Helper for `_filter_groups`, which is in turn a helper for `group_files`.
+ Given a mapping key -> Collection[Any], return the keys where the length of the
+ collection is zero.
+
+ Args:
+ grp (Mapping[Any, Collection[Any]]): dictionary to check
+
+ Returns:
+ set of keys
+ """
+ empty = set()
+ for (k, v) in grp.items():
+ if len(v) == 0: # explicit check to ensure failure if not a collection
+ empty.add(k)
+ return empty
+
+
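An illustrative check of the helper's contract (not part of the patch)::

    _get_keys_with_empty_values({"abi_l1b": ["f1.nc"], "glm_l2": []})
    # -> {"glm_l2"}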
def read_reader_config(config_files, loader=UnsafeLoader):
"""Read the reader `config_files` and return the extracted reader metadata."""
reader_config = load_yaml_reader_configs(*config_files, loader=loader)
@@ -254,31 +319,22 @@ def configs_for_reader(reader=None):
if reader is not None:
if not isinstance(reader, (list, tuple)):
reader = [reader]
- # check for old reader names
- new_readers = []
- for reader_name in reader:
- if reader_name.endswith('.yaml') or reader_name not in OLD_READER_NAMES:
- new_readers.append(reader_name)
- continue
-
- new_name = OLD_READER_NAMES[reader_name]
- # Satpy 0.11 only displays a warning
- # Satpy 0.13 will raise an exception
- raise ValueError("Reader name '{}' has been deprecated, use '{}' instead.".format(reader_name, new_name))
- # Satpy 0.15 or 1.0, remove exception and mapping
-
- reader = new_readers
+
+ reader = get_valid_reader_names(reader)
# given a config filename or reader name
config_files = [r if r.endswith('.yaml') else r + '.yaml' for r in reader]
else:
- reader_configs = glob_config(os.path.join('readers', '*.yaml'))
+ paths = get_entry_points_config_dirs('satpy.readers')
+ reader_configs = glob_config(os.path.join('readers', '*.yaml'), search_dirs=paths)
config_files = set(reader_configs)
for config_file in config_files:
config_basename = os.path.basename(config_file)
reader_name = os.path.splitext(config_basename)[0]
+ paths = get_entry_points_config_dirs('satpy.readers')
reader_configs = config_search_paths(
- os.path.join("readers", config_basename))
+ os.path.join("readers", config_basename),
+ search_dirs=paths, check_exists=True)
if not reader_configs:
# either the reader they asked for does not exist
@@ -288,22 +344,46 @@ def configs_for_reader(reader=None):
yield reader_configs
-def available_readers(as_dict=False):
+def get_valid_reader_names(reader):
+ """Check for old reader names or readers pending deprecation."""
+ new_readers = []
+ for reader_name in reader:
+ if reader_name in OLD_READER_NAMES:
+ raise ValueError(
+ "Reader name '{}' has been deprecated, "
+ "use '{}' instead.".format(reader_name,
+ OLD_READER_NAMES[reader_name]))
+
+ if reader_name in PENDING_OLD_READER_NAMES:
+ new_name = PENDING_OLD_READER_NAMES[reader_name]
+ warnings.warn("Reader name '{}' is being deprecated and will be removed soon."
+ "Please use '{}' instead.".format(reader_name, new_name),
+ FutureWarning)
+ new_readers.append(new_name)
+ else:
+ new_readers.append(reader_name)
+
+ return new_readers
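A small sketch of the pending-deprecation path introduced above::

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        names = get_valid_reader_names(["fci_l1c_fdhsi"])

    assert names == ["fci_l1c_nc"]                        # name translated
    assert issubclass(caught[0].category, FutureWarning)  # user warned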
+
+
+def available_readers(as_dict=False, yaml_loader=UnsafeLoader):
"""Available readers based on current configuration.
Args:
as_dict (bool): Optionally return reader information as a dictionary.
- Default: False
+ Default: False.
+ yaml_loader (Optional[Union[yaml.BaseLoader, yaml.FullLoader, yaml.UnsafeLoader]]):
+ The yaml loader type. Default: ``yaml.UnsafeLoader``.
- Returns: List of available reader names. If `as_dict` is `True` then
- a list of dictionaries including additionally reader information
- is returned.
+ Returns:
+ Union[list[str], list[dict]]: List of available reader names. If `as_dict` is `True` then
+ a list of dictionaries including additional reader information is returned.
"""
readers = []
for reader_configs in configs_for_reader():
try:
- reader_info = read_reader_config(reader_configs)
+ reader_info = read_reader_config(reader_configs, loader=yaml_loader)
except (KeyError, IOError, yaml.YAMLError):
LOG.debug("Could not import reader config from: %s", reader_configs)
LOG.debug("Error loading YAML", exc_info=True)
@@ -372,12 +452,12 @@ def find_files_and_readers(start_time=None, end_time=None, base_dir=None,
missing_ok (bool): If False (default), raise ValueError if no files
are found. If True, return empty dictionary if no
files are found.
- fs (FileSystem): Optional, instance of implementation of
- fsspec.spec.AbstractFileSystem (strictly speaking,
- any object of a class implementing ``.glob`` is
- enough). Defaults to searching the local filesystem.
+ fs (:class:`fsspec.spec.AbstractFileSystem`): Optional, instance of implementation of
+ :class:`fsspec.spec.AbstractFileSystem` (strictly speaking, any object of a class implementing
+ ``.glob`` is enough). Defaults to searching the local filesystem.
- Returns: Dictionary mapping reader name string to list of filenames
+ Returns:
+ dict: Dictionary mapping reader name string to list of filenames
"""
reader_files = {}
@@ -391,25 +471,9 @@ def find_files_and_readers(start_time=None, end_time=None, base_dir=None,
reader_kwargs['filter_parameters'] = filter_parameters
for reader_configs in configs_for_reader(reader):
- try:
- reader_instance = load_reader(reader_configs, **reader_kwargs)
- except (KeyError, IOError, yaml.YAMLError) as err:
- LOG.info('Cannot use %s', str(reader_configs))
- LOG.debug(str(err))
- if reader and (isinstance(reader, str) or len(reader) == 1):
- # if it is a single reader then give a more usable error
- raise
- continue
-
- if not reader_instance.supports_sensor(sensor):
- continue
- elif sensor is not None:
- # sensor was specified and a reader supports it
- sensor_supported = True
- loadables = reader_instance.select_files_from_directory(base_dir, fs)
- if loadables:
- loadables = list(
- reader_instance.filter_selected_filenames(loadables))
+ (reader_instance, loadables, this_sensor_supported) = _get_loadables_for_reader_config(
+ base_dir, reader, sensor, reader_configs, reader_kwargs, fs)
+ sensor_supported = sensor_supported or this_sensor_supported
if loadables:
reader_files[reader_instance.name] = list(loadables)
@@ -421,6 +485,44 @@ def find_files_and_readers(start_time=None, end_time=None, base_dir=None,
return reader_files
+def _get_loadables_for_reader_config(base_dir, reader, sensor, reader_configs,
+ reader_kwargs, fs):
+ """Get loadables for reader configs.
+
+ Helper for find_files_and_readers.
+
+ Args:
+ base_dir: as for `find_files_and_readers`
+ reader: as for `find_files_and_readers`
+ sensor: as for `find_files_and_readers`
+ reader_configs: reader metadata such as returned by
+ `configs_for_reader`.
+ reader_kwargs: Keyword arguments to be passed to reader.
+ fs (FileSystem): as for `find_files_and_readers`
+ """
+ sensor_supported = False
+ try:
+ reader_instance = load_reader(reader_configs, **reader_kwargs)
+ except (KeyError, IOError, yaml.YAMLError) as err:
+ LOG.info('Cannot use %s', str(reader_configs))
+ LOG.debug(str(err))
+ if reader and (isinstance(reader, str) or len(reader) == 1):
+ # if it is a single reader then give a more usable error
+ raise
+ return (None, [], False)
+
+ if not reader_instance.supports_sensor(sensor):
+ return (reader_instance, [], False)
+ if sensor is not None:
+ # sensor was specified and a reader supports it
+ sensor_supported = True
+ loadables = reader_instance.select_files_from_directory(base_dir, fs)
+ if loadables:
+ loadables = list(
+ reader_instance.filter_selected_filenames(loadables))
+ return (reader_instance, loadables, sensor_supported)
+
+
def load_readers(filenames=None, reader=None, reader_kwargs=None):
"""Create specified readers and assign files to them.
@@ -439,27 +541,10 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None):
"""
reader_instances = {}
- if not filenames and not reader:
- # used for an empty Scene
- return {}
- elif reader and filenames is not None and not filenames:
- # user made a mistake in their glob pattern
- raise ValueError("'filenames' was provided but is empty.")
- elif not filenames:
- LOG.warning("'filenames' required to create readers and load data")
+ if _early_exit(filenames, reader):
return {}
- elif reader is None and isinstance(filenames, dict):
- # filenames is a dictionary of reader_name -> filenames
- reader = list(filenames.keys())
- remaining_filenames = set(f for fl in filenames.values() for f in fl)
- elif reader and isinstance(filenames, dict):
- # filenames is a dictionary of reader_name -> filenames
- # but they only want one of the readers
- filenames = filenames[reader]
- remaining_filenames = set(filenames or [])
- else:
- remaining_filenames = set(filenames or [])
+ reader, filenames, remaining_filenames = _get_reader_and_filenames(reader, filenames)
(reader_kwargs, reader_kwargs_without_filter) = _get_reader_kwargs(reader, reader_kwargs)
for idx, reader_configs in enumerate(configs_for_reader(reader)):
@@ -490,15 +575,51 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None):
if not remaining_filenames:
break
+ _check_remaining_files(remaining_filenames)
+ _check_reader_instances(reader_instances)
+ return reader_instances
+
+
+def _early_exit(filenames, reader):
+ if not filenames and not reader:
+ # used for an empty Scene
+ return True
+ if reader and filenames is not None and not filenames:
+ # user made a mistake in their glob pattern
+ raise ValueError("'filenames' was provided but is empty.")
+ if not filenames:
+ LOG.warning("'filenames' required to create readers and load data")
+ return True
+ return False
+
+
+def _get_reader_and_filenames(reader, filenames):
+ if reader is None and isinstance(filenames, dict):
+ # filenames is a dictionary of reader_name -> filenames
+ reader = list(filenames.keys())
+ remaining_filenames = set(f for fl in filenames.values() for f in fl)
+ elif reader and isinstance(filenames, dict):
+ # filenames is a dictionary of reader_name -> filenames
+ # but they only want one of the readers
+ filenames = filenames[reader]
+ remaining_filenames = set(filenames or [])
+ else:
+ remaining_filenames = set(filenames or [])
+ return reader, filenames, remaining_filenames
+
+
+def _check_remaining_files(remaining_filenames):
if remaining_filenames:
LOG.warning("Don't know how to open the following files: {}".format(str(remaining_filenames)))
+
+
+def _check_reader_instances(reader_instances):
if not reader_instances:
raise ValueError("No supported files found")
- elif not any(list(r.available_dataset_ids) for r in reader_instances.values()):
+ if not any(list(r.available_dataset_ids) for r in reader_instances.values()):
raise ValueError("No dataset could be loaded. Either missing "
"requirements (such as Epilog, Prolog) or none of the "
"provided files match the filter parameters.")
- return reader_instances
def _get_reader_kwargs(reader, reader_kwargs):
@@ -527,7 +648,10 @@ def _get_reader_kwargs(reader, reader_kwargs):
class FSFile(os.PathLike):
"""Implementation of a PathLike file object, that can be opened.
- This is made to be used in conjuction with fsspec or s3fs. For example::
+ When filenames with valid transfer protocols are given to :class:`Scene`, this class is
+ used automatically, so manual usage is mainly needed for fine-grained control.
+
+ This class is made to be used in conjunction with fsspec or s3fs. For example::
from satpy import Scene
@@ -556,6 +680,7 @@ def __init__(self, file, fs=None):
fs (fsspec filesystem, optional)
Object implementing the fsspec filesystem protocol.
"""
+ self._fs_open_kwargs = _get_fs_open_kwargs(file)
try:
self._file = file.path
self._fs = file.fs
@@ -575,15 +700,22 @@ def __repr__(self):
"""Representation of the object."""
return '<FSFile "{}">'.format(self._file)
- def open(self):
+ def open(self, *args, **kwargs):
"""Open the file.
This is read-only.
"""
+ fs_open_kwargs = self._update_with_fs_open_kwargs(kwargs)
try:
- return self._fs.open(self._file)
+ return self._fs.open(self._file, *args, **fs_open_kwargs)
except AttributeError:
- return open(self._file)
+ return open(self._file, *args, **kwargs)
+
+ def _update_with_fs_open_kwargs(self, user_kwargs):
+ """Complement keyword arguments for opening a file via file system."""
+ kwargs = user_kwargs.copy()
+ kwargs.update(self._fs_open_kwargs)
+ return kwargs
def __lt__(self, other):
"""Implement ordering.
@@ -619,10 +751,27 @@ def __hash__(self):
try:
fshash = hash(self._fs)
except TypeError: # fsspec < 0.8.8 for CachingFileSystem
- fshash = hash(pickle.dumps(self._fs))
+ fshash = hash(pickle.dumps(self._fs)) # nosec B403
return hash(self._file) ^ fshash
+def _get_fs_open_kwargs(file):
+ """Get keyword arguments for opening a file via file system.
+
+ For example, compression.
+ """
+ return {
+ "compression": _get_compression(file)
+ }
+
+
+def _get_compression(file):
+ try:
+ return file.compression
+ except AttributeError:
+ return None
+
+
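A sketch of the new compression forwarding: when the wrapped fsspec ``OpenFile`` carries a ``compression`` attribute, ``FSFile.open`` passes it on to ``fs.open`` (bucket and file name are hypothetical)::

    import fsspec
    from satpy.readers import FSFile

    # The OpenFile records compression="bz2"; FSFile picks it up so that
    # reading transparently decompresses the segment.
    of = fsspec.open("s3://hypothetical-bucket/segment.bz2",
                     compression="bz2", anon=True)
    fsfile = FSFile(of)
    with fsfile.open() as fobj:
        raw = fobj.read()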
def open_file_or_filename(unknown_file_thing):
"""Try to open the *unknown_file_thing*, otherwise return the filename."""
try:
diff --git a/satpy/readers/aapp_l1b.py b/satpy/readers/aapp_l1b.py
index e74bed1518..ea3877e48a 100644
--- a/satpy/readers/aapp_l1b.py
+++ b/satpy/readers/aapp_l1b.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2012-2020 Satpy developers
+# Copyright (c) 2012-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -33,28 +33,40 @@
import xarray as xr
from dask import delayed
-from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
+from satpy.utils import get_chunk_size_limit
+
+CHANNEL_DTYPE = np.float64
+
+
+def get_avhrr_lac_chunks(shape, dtype):
+ """Get chunks from a given shape adapted for full-resolution AVHRR data."""
+ limit = get_chunk_size_limit(dtype)
+ return da.core.normalize_chunks(("auto", 2048), shape=shape, limit=limit, dtype=dtype)
+
+
+def get_aapp_chunks(shape):
+ """Get chunks from a given shape adapted for AAPP data."""
+ return get_avhrr_lac_chunks(shape, dtype=CHANNEL_DTYPE)
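What the helpers compute, spelled out under an assumed 128 MiB chunk-size limit: whole 2048-pixel scanlines per chunk, with the row count chosen automatically::

    import dask.array as da
    import numpy as np

    # Equivalent of get_aapp_chunks((12000, 2048)) when the configured
    # chunk-size limit is 128 MiB.
    chunks = da.core.normalize_chunks(("auto", 2048), shape=(12000, 2048),
                                      limit=128 * 1024 ** 2, dtype=np.float64)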
-LINE_CHUNK = CHUNK_SIZE ** 2 // 2048
logger = logging.getLogger(__name__)
-CHANNEL_NAMES = ['1', '2', '3a', '3b', '4', '5']
+AVHRR_CHANNEL_NAMES = ["1", "2", "3a", "3b", "4", "5"]
-ANGLES = ['sensor_zenith_angle',
- 'solar_zenith_angle',
- 'sun_sensor_azimuth_difference_angle']
+AVHRR_ANGLE_NAMES = ['sensor_zenith_angle',
+ 'solar_zenith_angle',
+ 'sun_sensor_azimuth_difference_angle']
-PLATFORM_NAMES = {4: 'NOAA-15',
- 2: 'NOAA-16',
- 6: 'NOAA-17',
- 7: 'NOAA-18',
- 8: 'NOAA-19',
- 11: 'Metop-B',
- 12: 'Metop-A',
- 13: 'Metop-C',
- 14: 'Metop simulator'}
+AVHRR_PLATFORM_IDS2NAMES = {4: 'NOAA-15',
+ 2: 'NOAA-16',
+ 6: 'NOAA-17',
+ 7: 'NOAA-18',
+ 8: 'NOAA-19',
+ 11: 'Metop-B',
+ 12: 'Metop-A',
+ 13: 'Metop-C',
+ 14: 'Metop simulator'}
def create_xarray(arr):
@@ -63,55 +75,29 @@ def create_xarray(arr):
return res
-class AVHRRAAPPL1BFile(BaseFileHandler):
- """Reader for AVHRR L1B files created from the AAPP software."""
+class AAPPL1BaseFileHandler(BaseFileHandler):
+ """A base file handler for the AAPP level-1 formats."""
def __init__(self, filename, filename_info, filetype_info):
- """Initialize object information by reading the input file."""
- super(AVHRRAAPPL1BFile, self).__init__(filename, filename_info,
- filetype_info)
- self.channels = {i: None for i in AVHRR_CHANNEL_NAMES}
- self.units = {i: 'counts' for i in AVHRR_CHANNEL_NAMES}
+ """Initialize AAPP level-1 file handler object."""
+ super().__init__(filename, filename_info, filetype_info)
+
+ self.channels = None
+ self.units = None
+ self.sensor = "unknown"
self._data = None
self._header = None
- self._is3b = None
- self._is3a = None
- self._shape = None
self.area = None
- self.sensor = 'avhrr-3'
- self.read()
- self.active_channels = self._get_active_channels()
-
- self.platform_name = PLATFORM_NAMES.get(self._header['satid'][0], None)
-
- if self.platform_name is None:
- raise ValueError("Unsupported platform ID: %d" % self.header['satid'])
-
- def _get_active_channels(self):
- status = self._get_channel_binary_status_from_header()
- return self._convert_binary_channel_status_to_activation_dict(status)
-
- def _get_channel_binary_status_from_header(self):
- status = self._header['inststat1'].item()
- change_line = self._header['statchrecnb']
- if change_line > 0:
- status |= self._header['inststat2'].item()
- return status
+ self._channel_names = []
+ self._angle_names = []
- @staticmethod
- def _convert_binary_channel_status_to_activation_dict(status):
- bits_channels = ((13, '1'),
- (12, '2'),
- (11, '3a'),
- (10, '3b'),
- (9, '4'),
- (8, '5'))
- activated = dict()
- for bit, channel_name in bits_channels:
- activated[channel_name] = bool(status >> bit & 1)
- return activated
+ def _set_filedata_layout(self):
+ """Set the file data type/layout."""
+ self._header_offset = 0
+ self._scan_type = np.dtype([("siteid", " 0:
+ status |= self._header['inststat2'].item()
+ return status
+
+ @staticmethod
+ def _convert_binary_channel_status_to_activation_dict(status):
+ bits_channels = ((13, '1'),
+ (12, '2'),
+ (11, '3a'),
+ (10, '3b'),
+ (9, '4'),
+ (8, '5'))
+ activated = dict()
+ for bit, channel_name in bits_channels:
+ activated[channel_name] = bool(status >> bit & 1)
+ return activated
def available_datasets(self, configured_datasets=None):
"""Get the available datasets."""
for _, mda in configured_datasets:
- if mda['name'] in CHANNEL_NAMES:
+ if mda['name'] in self._channel_names:
yield self.active_channels[mda['name']], mda
else:
yield True, mda
@@ -180,11 +238,10 @@ def get_angles(self, angle_id):
"""Get sun-satellite viewing angles."""
sunz, satz, azidiff = self._get_all_interpolated_angles()
- name_to_variable = dict(zip(ANGLES, (satz, sunz, azidiff)))
+ name_to_variable = dict(zip(self._angle_names, (satz, sunz, azidiff)))
return create_xarray(name_to_variable[angle_id])
- @functools.lru_cache(maxsize=10)
- def _get_all_interpolated_angles(self):
+ def _get_all_interpolated_angles_uncached(self):
sunz40km, satz40km, azidiff40km = self._get_tiepoint_angles_in_degrees()
return self._interpolate_arrays(sunz40km, satz40km, azidiff40km)
@@ -194,10 +251,10 @@ def _get_tiepoint_angles_in_degrees(self):
azidiff40km = self._data["ang"][:, :, 2] * 1e-2
return sunz40km, satz40km, azidiff40km
- def _interpolate_arrays(self, *input_arrays):
+ def _interpolate_arrays(self, *input_arrays, geolocation=False):
lines = input_arrays[0].shape[0]
try:
- interpolator = self._create_40km_interpolator(lines, *input_arrays)
+ interpolator = self._create_40km_interpolator(lines, *input_arrays, geolocation=geolocation)
except ImportError:
logger.warning("Could not interpolate, python-geotiepoints missing.")
output_arrays = input_arrays
@@ -208,8 +265,12 @@ def _interpolate_arrays(self, *input_arrays):
return output_arrays
@staticmethod
- def _create_40km_interpolator(lines, *arrays_40km):
- from geotiepoints.interpolator import Interpolator
+ def _create_40km_interpolator(lines, *arrays_40km, geolocation=False):
+ if geolocation:
+ # Slower but accurate at datum line
+ from geotiepoints.geointerpolator import GeoInterpolator as Interpolator
+ else:
+ from geotiepoints.interpolator import Interpolator
cols40km = np.arange(24, 2048, 40)
cols1km = np.arange(2048)
rows40km = np.arange(lines)
@@ -226,15 +287,14 @@ def navigate(self, coordinate_id):
lons, lats = self._get_all_interpolated_coordinates()
if coordinate_id == 'longitude':
return create_xarray(lons)
- elif coordinate_id == 'latitude':
+ if coordinate_id == 'latitude':
return create_xarray(lats)
- else:
- raise KeyError("Coordinate {} unknown.".format(coordinate_id))
- @functools.lru_cache(maxsize=10)
- def _get_all_interpolated_coordinates(self):
+ raise KeyError("Coordinate {} unknown.".format(coordinate_id))
+
+ def _get_all_interpolated_coordinates_uncached(self):
lons40km, lats40km = self._get_coordinates_in_degrees()
- return self._interpolate_arrays(lons40km, lats40km)
+ return self._interpolate_arrays(lons40km, lats40km, geolocation=True)
def _get_coordinates_in_degrees(self):
lons40km = self._data["pos"][:, :, 1] * 1e-4
@@ -256,10 +316,11 @@ def calibrate(self,
if dataset_id['name'] in ("3a", "3b") and self._is3b is None:
# Is it 3a or 3b:
+ line_chunks = get_aapp_chunks((self._data.shape[0], 2048))[0]
self._is3a = da.bitwise_and(da.from_array(self._data['scnlinbit'],
- chunks=LINE_CHUNK), 3) == 0
+ chunks=line_chunks), 3) == 0
self._is3b = da.bitwise_and(da.from_array(self._data['scnlinbit'],
- chunks=LINE_CHUNK), 3) == 1
+ chunks=line_chunks), 3) == 1
try:
vis_idx = ['1', '2', '3a'].index(dataset_id['name'])
@@ -295,8 +356,6 @@ def calibrate(self,
return ds
-AVHRR_CHANNEL_NAMES = ("1", "2", "3a", "3b", "4", "5")
-
# AAPP 1b header
_HEADERTYPE = np.dtype([("siteid", "S3"),
@@ -489,13 +548,16 @@ def _vis_calibrate(data,
if calib_type not in ['counts', 'radiance', 'reflectance']:
raise ValueError('Calibration ' + calib_type + ' unknown!')
- channel = da.from_array(data["hrpt"][:, :, chn], chunks=(LINE_CHUNK, 2048))
+ channel_data = data["hrpt"][:, :, chn]
+ chunks = get_aapp_chunks(channel_data.shape)
+ line_chunks = chunks[0]
+ channel = da.from_array(channel_data, chunks=chunks)
mask &= channel != 0
if calib_type == 'counts':
return channel
- channel = channel.astype(np.float64)
+ channel = channel.astype(CHANNEL_DTYPE)
if calib_type == 'radiance':
logger.info("Radiances are not yet supported for " +
@@ -513,33 +575,32 @@ def _vis_calibrate(data,
coeff_idx = 0
intersection = da.from_array(data["calvis"][:, chn, coeff_idx, 4],
- chunks=LINE_CHUNK)
+ chunks=line_chunks)
if calib_coeffs is not None:
logger.info("Updating from external calibration coefficients.")
- slope1 = da.from_array(calib_coeffs[0], chunks=LINE_CHUNK)
- intercept1 = da.from_array(calib_coeffs[1], chunks=LINE_CHUNK)
- slope2 = da.from_array(calib_coeffs[2], chunks=LINE_CHUNK)
- intercept2 = da.from_array(calib_coeffs[3], chunks=LINE_CHUNK)
+ slope1 = da.from_array(calib_coeffs[0], chunks=line_chunks)
+ intercept1 = da.from_array(calib_coeffs[1], chunks=line_chunks)
+ slope2 = da.from_array(calib_coeffs[2], chunks=line_chunks)
+ intercept2 = da.from_array(calib_coeffs[3], chunks=line_chunks)
else:
slope1 = da.from_array(data["calvis"][:, chn, coeff_idx, 0],
- chunks=LINE_CHUNK) * 1e-10
+ chunks=line_chunks) * 1e-10
intercept1 = da.from_array(data["calvis"][:, chn, coeff_idx, 1],
- chunks=LINE_CHUNK) * 1e-7
+ chunks=line_chunks) * 1e-7
slope2 = da.from_array(data["calvis"][:, chn, coeff_idx, 2],
- chunks=LINE_CHUNK) * 1e-10
+ chunks=line_chunks) * 1e-10
intercept2 = da.from_array(data["calvis"][:, chn, coeff_idx, 3],
- chunks=LINE_CHUNK) * 1e-7
-
- if chn == 1:
- # In the level 1b file, the visible coefficients are stored as 4-byte integers. Scaling factors then convert
- # them to real numbers which are applied to the measured counts. The coefficient is different depending on
- # whether the counts are less than or greater than the high-gain/low-gain transition value (nominally 500).
- # The slope for visible channels should always be positive (reflectance increases with count). With the
- # pre-launch coefficients the channel 2 slope is always positive but with the operational coefs the stored
- # number in the high-reflectance regime overflows the maximum 2147483647, i.e. it is negative when
- # interpreted as a signed integer. So you have to modify it.
- slope2 = da.where(slope2 < 0, slope2 + 0.4294967296, slope2)
+ chunks=line_chunks) * 1e-7
+
+ # In the level 1b file, the visible coefficients are stored as 4-byte integers. Scaling factors then convert
+ # them to real numbers which are applied to the measured counts. The coefficient is different depending on
+ # whether the counts are less than or greater than the high-gain/low-gain transition value (nominally 500).
+ # The slope for visible channels should always be positive (reflectance increases with count). With the
+ # pre-launch coefficients the channel 2 and 3a slopes are always positive, but with the operational coefs the
+ # stored number in the high-reflectance regime overflows the maximum 2147483647, i.e. it is negative when
+ # interpreted as a signed integer. So you have to modify it. Channel 1 is treated the same way in AAPP.
+ slope2 = da.where(slope2 < 0, slope2 + 0.4294967296, slope2)
channel = da.where(channel <= intersection[:, None],
channel * slope1[:, None] + intercept1[:, None],
@@ -556,18 +617,22 @@ def _ir_calibrate(header, data, irchn, calib_type, mask=True):
*calib_type* in brightness_temperature, radiance, count
"""
- count = da.from_array(data["hrpt"][:, :, irchn + 2], chunks=(LINE_CHUNK, 2048))
+ channel_data = data["hrpt"][:, :, irchn + 2]
+ chunks = get_aapp_chunks(channel_data.shape)
+ line_chunks = chunks[0]
+
+ count = da.from_array(channel_data, chunks=chunks)
if calib_type == 0:
return count
# Mask unnaturally low values
mask &= count != 0
- count = count.astype(np.float64)
+ count = count.astype(CHANNEL_DTYPE)
- k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=LINE_CHUNK) / 1.0e9
- k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=LINE_CHUNK) / 1.0e6
- k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=LINE_CHUNK) / 1.0e6
+ k1_ = da.from_array(data['calir'][:, irchn, 0, 0], chunks=line_chunks) / 1.0e9
+ k2_ = da.from_array(data['calir'][:, irchn, 0, 1], chunks=line_chunks) / 1.0e6
+ k3_ = da.from_array(data['calir'][:, irchn, 0, 2], chunks=line_chunks) / 1.0e6
# Count to radiance conversion:
rad = k1_[:, None] * count * count + k2_[:, None] * count + k3_[:, None]
diff --git a/satpy/readers/aapp_mhs_amsub_l1c.py b/satpy/readers/aapp_mhs_amsub_l1c.py
new file mode 100644
index 0000000000..f590214953
--- /dev/null
+++ b/satpy/readers/aapp_mhs_amsub_l1c.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2020, 2021, 2022 Pytroll developers
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Reader for the AAPP AMSU-B/MHS level-1c data.
+
+https://nwp-saf.eumetsat.int/site/download/documentation/aapp/NWPSAF-MF-UD-003_Formats_v8.0.pdf
+
+"""
+
+import logging
+
+import dask.array as da
+import numpy as np
+
+from satpy import CHUNK_SIZE
+from satpy.readers.aapp_l1b import AAPPL1BaseFileHandler, create_xarray
+
+logger = logging.getLogger(__name__)
+
+
+LINE_CHUNK = CHUNK_SIZE ** 2 // 90
+
+MHS_AMSUB_CHANNEL_NAMES = ['1', '2', '3', '4', '5']
+MHS_AMSUB_ANGLE_NAMES = ['sensor_zenith_angle', 'sensor_azimuth_angle',
+ 'solar_zenith_angle', 'solar_azimuth_difference_angle']
+
+MHS_AMSUB_PLATFORM_IDS2NAMES = {15: 'NOAA-15',
+ 16: 'NOAA-16',
+ 17: 'NOAA-17',
+ 18: 'NOAA-18',
+ 19: 'NOAA-19',
+ 1: 'Metop-B',
+ 2: 'Metop-A',
+ 3: 'Metop-C',
+ 4: 'Metop simulator'}
+
+MHS_AMSUB_PLATFORMS = ['Metop-A', 'Metop-B', 'Metop-C', 'NOAA-18', 'NOAA-19']
+
+
+class MHS_AMSUB_AAPPL1CFile(AAPPL1BaseFileHandler):
+ """Reader for AMSU-B/MHS L1C files created from the AAPP software."""
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Initialize object information by reading the input file."""
+ super().__init__(filename, filename_info, filetype_info)
+
+ self.channels = {i: None for i in MHS_AMSUB_CHANNEL_NAMES}
+ self.units = {i: 'brightness_temperature' for i in MHS_AMSUB_CHANNEL_NAMES}
+
+ self._channel_names = MHS_AMSUB_CHANNEL_NAMES
+ self._angle_names = MHS_AMSUB_ANGLE_NAMES
+
+ self._set_filedata_layout()
+ self.read()
+
+ self._get_platform_name(MHS_AMSUB_PLATFORM_IDS2NAMES)
+ self._get_sensorname()
+
+ def _set_filedata_layout(self):
+ """Set the file data type/layout."""
+ self._header_offset = HEADER_LENGTH
+ self._scan_type = _SCANTYPE
+ self._header_type = _HEADERTYPE
+
+ def _get_sensorname(self):
+ """Get the sensor name from the header."""
+ if self._header['instrument'][0] == 11:
+ self.sensor = 'amsub'
+ elif self._header['instrument'][0] == 12:
+ self.sensor = 'mhs'
+ else:
+ raise IOError("Sensor neither MHS nor AMSU-B!")
+
+ def get_angles(self, angle_id):
+ """Get sun-satellite viewing angles."""
+ satz = self._data["angles"][:, :, 0] * 1e-2
+ sata = self._data["angles"][:, :, 1] * 1e-2
+
+ sunz = self._data["angles"][:, :, 2] * 1e-2
+ suna = self._data["angles"][:, :, 3] * 1e-2
+
+ name_to_variable = dict(zip(MHS_AMSUB_ANGLE_NAMES, (satz, sata, sunz, suna)))
+ return create_xarray(name_to_variable[angle_id])
+
+ def navigate(self, coordinate_id):
+ """Get the longitudes and latitudes of the scene."""
+ lons, lats = self._get_coordinates_in_degrees()
+ if coordinate_id == 'longitude':
+ return create_xarray(lons)
+ if coordinate_id == 'latitude':
+ return create_xarray(lats)
+
+ raise KeyError("Coordinate {} unknown.".format(coordinate_id))
+
+ def _get_coordinates_in_degrees(self):
+ lons = self._data["latlon"][:, :, 1] * 1e-4
+ lats = self._data["latlon"][:, :, 0] * 1e-4
+ return lons, lats
+
+ def _calibrate_active_channel_data(self, key):
+ """Calibrate active channel data only."""
+ return self.calibrate(key)
+
+ def calibrate(self, dataset_id):
+ """Calibrate the data."""
+ units = {'brightness_temperature': 'K'}
+
+ mask = True
+ idx = ['1', '2', '3', '4', '5'].index(dataset_id['name'])
+
+ ds = create_xarray(
+ _calibrate(self._data, idx,
+ dataset_id['calibration'],
+ mask=mask))
+
+ ds.attrs['units'] = units[dataset_id['calibration']]
+ ds.attrs.update(dataset_id._asdict())
+ return ds
+
+
+def _calibrate(data,
+ chn,
+ calib_type,
+ mask=True):
+ """Calibrate channel data.
+
+ *calib_type* in brightness_temperature.
+
+ """
+ if calib_type not in ['brightness_temperature']:
+ raise ValueError('Calibration ' + calib_type + ' unknown!')
+
+ channel = da.from_array(data["btemps"][:, :, chn] / 100., chunks=(LINE_CHUNK, 90))
+ mask &= channel != 0
+
+ if calib_type == 'counts':
+ return channel
+
+ channel = channel.astype(np.float64)
+
+ return da.where(mask, channel, np.nan)
+
+
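A hedged usage sketch for the new handler; the reader name follows the accompanying YAML and the file name is hypothetical::

    from satpy import Scene

    scn = Scene(filenames=["mhsl1c_noaa19_20220407_1200_12345.l1c"],
                reader="aapp_mhs_amsub_l1c")
    scn.load(["1"])   # channel 1 brightness temperatures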
+HEADER_LENGTH = 1152*4
+
+_HEADERTYPE = np.dtype([("siteid", "S3"),
+ ("cfill_1", "S1"),
+ ("l1bsite", "S3"),
+ ("cfill_2", "S1"),
+ ("versnb", ".
-"""Advanced Geostationary Radiation Imager reader for the Level_1 HDF format.
-
-The files read by this reader are described in the official Real Time Data Service:
-
- http://fy4.nsmc.org.cn/data/en/data/realtime.html
-
-"""
-
-import logging
-import numpy as np
-import xarray as xr
-import dask.array as da
-from datetime import datetime
-from satpy.readers._geos_area import get_area_extent, get_area_definition
-from satpy.readers.hdf5_utils import HDF5FileHandler
-
-logger = logging.getLogger(__name__)
-
-# info of 500 m, 1 km, 2 km and 4 km data
-_resolution_list = [500, 1000, 2000, 4000]
-_COFF_list = [10991.5, 5495.5, 2747.5, 1373.5]
-_CFAC_list = [81865099.0, 40932549.0, 20466274.0, 10233137.0]
-_LOFF_list = [10991.5, 5495.5, 2747.5, 1373.5]
-_LFAC_list = [81865099.0, 40932549.0, 20466274.0, 10233137.0]
-
-PLATFORM_NAMES = {'FY4A': 'FY-4A',
- 'FY4B': 'FY-4B',
- 'FY4C': 'FY-4C'}
-
-
-class HDF_AGRI_L1(HDF5FileHandler):
- """AGRI l1 file handler."""
-
- def __init__(self, filename, filename_info, filetype_info):
- """Init filehandler."""
- super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info)
-
- def get_dataset(self, dataset_id, ds_info):
- """Load a dataset."""
- logger.debug('Reading in get_dataset %s.', dataset_id['name'])
- file_key = ds_info.get('file_key', dataset_id['name'])
- lut_key = ds_info.get('lut_key', dataset_id['name'])
- data = self.get(file_key)
- lut = self.get(lut_key)
- if data.ndim >= 2:
- data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'})
-
- # calibration
- calibration = ds_info['calibration']
-
- if calibration == 'counts':
- data.attrs['units'] = ds_info['units']
- ds_info['valid_range'] = data.attrs['valid_range']
- return data
-
- elif calibration in ['reflectance', 'radiance']:
- logger.debug("Calibrating to reflectances")
- # using the corresponding SCALE and OFFSET
- cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)'
- num_channel = self.get(cal_coef).shape[0]
-
- if num_channel == 1:
- # only channel_2, resolution = 500 m
- slope = self.get(cal_coef)[0, 0].values
- offset = self.get(cal_coef)[0, 1].values
- else:
- slope = self.get(cal_coef)[int(file_key[-2:])-1, 0].values
- offset = self.get(cal_coef)[int(file_key[-2:])-1, 1].values
-
- data = self.dn2(data, calibration, slope, offset)
-
- if calibration == 'reflectance':
- ds_info['valid_range'] = (data.attrs['valid_range'] * slope + offset) * 100
- else:
- ds_info['valid_range'] = (data.attrs['valid_range'] * slope + offset)
-
- elif calibration == 'brightness_temperature':
- logger.debug("Calibrating to brightness_temperature")
- # the value of dn is the index of brightness_temperature
- data = self.calibrate(data, lut)
- ds_info['valid_range'] = lut.attrs['valid_range']
-
- satname = PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name'])
- data.attrs.update({'platform_name': satname,
- 'sensor': self['/attr/Sensor Identification Code'].lower(),
- 'orbital_parameters': {
- 'satellite_nominal_latitude': self['/attr/NOMCenterLat'].item(),
- 'satellite_nominal_longitude': self['/attr/NOMCenterLon'].item(),
- 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}})
- data.attrs.update(ds_info)
-
- # remove attributes that could be confusing later
- data.attrs.pop('FillValue', None)
- data.attrs.pop('Intercept', None)
- data.attrs.pop('Slope', None)
-
- data = data.where((data >= min(data.attrs['valid_range'])) &
- (data <= max(data.attrs['valid_range'])))
-
- return data
-
- def get_area_def(self, key):
- """Get the area definition."""
- # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification
- # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf
- res = key['resolution']
- pdict = {}
- pdict['coff'] = _COFF_list[_resolution_list.index(res)]
- pdict['loff'] = _LOFF_list[_resolution_list.index(res)]
- pdict['cfac'] = _CFAC_list[_resolution_list.index(res)]
- pdict['lfac'] = _LFAC_list[_resolution_list.index(res)]
- pdict['a'] = self.file_content['/attr/dEA'] * 1E3 # equator radius (m)
- pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat']) # polar radius (m)
- pdict['h'] = self.file_content['/attr/NOMSatHeight'] # the altitude of satellite (m)
-
- pdict['ssp_lon'] = self.file_content['/attr/NOMCenterLon']
- pdict['nlines'] = self.file_content['/attr/RegLength']
- pdict['ncols'] = self.file_content['/attr/RegWidth']
-
- pdict['scandir'] = 'S2N'
-
- b500 = ['C02']
- b1000 = ['C01', 'C03']
- b2000 = ['C04', 'C05', 'C06', 'C07']
-
- pdict['a_desc'] = "AGRI {} area".format(self.filename_info['observation_type'])
-
- if (key['name'] in b500):
- pdict['a_name'] = self.filename_info['observation_type']+'_500m'
- pdict['p_id'] = 'FY-4A, 500m'
- elif (key['name'] in b1000):
- pdict['a_name'] = self.filename_info['observation_type']+'_1000m'
- pdict['p_id'] = 'FY-4A, 1000m'
- elif (key['name'] in b2000):
- pdict['a_name'] = self.filename_info['observation_type']+'_2000m'
- pdict['p_id'] = 'FY-4A, 2000m'
- else:
- pdict['a_name'] = self.filename_info['observation_type']+'_4000m'
- pdict['p_id'] = 'FY-4A, 4000m'
-
- pdict['coff'] = pdict['coff'] + 0.5
- pdict['nlines'] = pdict['nlines'] - 1
- pdict['ncols'] = pdict['ncols'] - 1
- pdict['loff'] = (pdict['loff'] - self.file_content['/attr/End Line Number'] + 0.5)
- area_extent = get_area_extent(pdict)
- area_extent = (area_extent[0] + 2000, area_extent[1], area_extent[2] + 2000, area_extent[3])
-
- pdict['nlines'] = pdict['nlines'] + 1
- pdict['ncols'] = pdict['ncols'] + 1
- area = get_area_definition(pdict, area_extent)
-
- return area
-
- def dn2(self, dn, calibration, slope, offset):
- """Convert digital number (DN) to reflectance or radiance.
-
- Args:
- dn: Raw detector digital number
- slope: Slope
- offset: Offset
-
- Returns:
- Reflectance [%]
- or Radiance [mW/ (m2 cm-1 sr)]
- """
- ref = dn * slope + offset
- if calibration == 'reflectance':
- ref *= 100 # set unit to %
- ref = ref.clip(min=0)
- ref.attrs = dn.attrs
-
- return ref
-
- @staticmethod
- def _getitem(block, lut):
- return lut[block]
-
- def calibrate(self, data, lut):
- """Calibrate digital number (DN) to brightness_temperature.
-
- Args:
- dn: Raw detector digital number
- lut: the look up table
- Returns:
- brightness_temperature [K]
- """
- # append nan to the end of lut for fillvalue
- lut = np.append(lut, np.nan)
- data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data)
- res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype)
- res = xr.DataArray(res, dims=data.dims,
- attrs=data.attrs, coords=data.coords)
-
- return res
-
- @property
- def start_time(self):
- """Get the start time."""
- start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z'
- return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ')
-
- @property
- def end_time(self):
- """Get the end time."""
- end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z'
- return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ')
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Advanced Geostationary Radiation Imager reader for the Level_1 HDF format.
+
+The files read by this reader are described in the official Real Time Data Service:
+
+ http://fy4.nsmc.org.cn/data/en/data/realtime.html
+
+"""
+
+import logging
+
+from satpy.readers.fy4_base import FY4Base
+
+logger = logging.getLogger(__name__)
+
+
+class HDF_AGRI_L1(FY4Base):
+ """AGRI l1 file handler."""
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Init filehandler."""
+ super(HDF_AGRI_L1, self).__init__(filename, filename_info, filetype_info)
+ self.sensor = 'AGRI'
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Load a dataset."""
+ ds_name = dataset_id['name']
+ logger.debug('Reading in get_dataset %s.', ds_name)
+ file_key = ds_info.get('file_key', ds_name)
+ if self.PLATFORM_ID == 'FY-4B':
+ if self.CHANS_ID in file_key:
+ file_key = f'Data/{file_key}'
+ elif self.SUN_ID in file_key or self.SAT_ID in file_key:
+ file_key = f'Navigation/{file_key}'
+ data = self.get(file_key)
+ if data.ndim >= 2:
+ data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'})
+ data = self.calibrate(data, ds_info, ds_name, file_key)
+
+ self.adjust_attrs(data, ds_info)
+
+ return data
+
+ def adjust_attrs(self, data, ds_info):
+ """Adjust the attrs of the data."""
+ satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name'])
+ data.attrs.update({'platform_name': satname,
+ 'sensor': self['/attr/Sensor Identification Code'].lower(),
+ 'orbital_parameters': {
+ 'satellite_nominal_latitude': self['/attr/NOMCenterLat'].item(),
+ 'satellite_nominal_longitude': self['/attr/NOMCenterLon'].item(),
+ 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}})
+ data.attrs.update(ds_info)
+ # remove attributes that could be confusing later
+ data.attrs.pop('FillValue', None)
+ data.attrs.pop('Intercept', None)
+ data.attrs.pop('Slope', None)
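Usage is unchanged by the refactor; a sketch, assuming the reader keeps its ``agri_l1`` registration (the file name is hypothetical)::

    from satpy import Scene

    scn = Scene(filenames=["FY4A-_AGRI--_N_DISK_1047E_L1-_FDI-_MULT_NOM_"
                           "20220407120000_20220407121459_4000M_V0001.HDF"],
                reader="agri_l1")
    scn.load(["C12"])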
diff --git a/satpy/readers/ahi_hsd.py b/satpy/readers/ahi_hsd.py
index 0cec4b22ed..95f81569ac 100644
--- a/satpy/readers/ahi_hsd.py
+++ b/satpy/readers/ahi_hsd.py
@@ -24,31 +24,61 @@
Time Information
****************
-AHI observations use the idea of a "scheduled" time and an "observation time.
-The "scheduled" time is when the instrument was told to record the data,
-usually at a specific and consistent interval. The "observation" time is when
-the data was actually observed. Scheduled time can be accessed from the
-`scheduled_time` metadata key and observation time from the `start_time` key.
+AHI observations use the idea of a "nominal" time and an "observation" time.
+The "nominal" time or repeat cycle is the overall window when the instrument
+can record data, usually at a specific and consistent interval. The
+"observation" time is when the data was actually observed inside the nominal
+window. These two times are stored in a sub-dictionary of the metadata called
+``time_parameters``. Nominal time can be accessed from the
+``nominal_start_time`` and ``nominal_end_time`` metadata keys and
+observation time from the ``observation_start_time`` and
+``observation_end_time`` keys. Observation time can also be accessed from the
+parent (``.attrs``) dictionary as the ``start_time`` and ``end_time`` keys.
+
+Satellite Position
+******************
+
+As discussed in the :ref:`orbital_parameters` documentation, a satellite
+position can be described by a specific "actual" position, a "nominal"
+position, a "projection" position, or sometimes a "nadir" position. Not all
+readers are able to produce all of these positions. In the case of AHI HSD data
+we have an "actual" and "projection" position. For a lot of sensors/readers
+though, the "actual" position values do not change between bands or segments
+of the same time step (repeat cycle). AHI HSD files contain varying values for
+the actual position.
+
+Other components in Satpy use this actual satellite
+position to generate other values (ex. sensor zenith angles). If these values
+are not consistent between bands then Satpy (dask) will not be able to share
+these calculations (generate one sensor zenith angle for band 1, another for
+band 2, etc) even though there is rarely a noticeable difference. To deal with
+this this reader has an option ``round_actual_position`` that defaults to
+``True`` and will round the "actual" position (longitude, latitude, altitude)
+in a way to produce as consistent a position between bands as possible.
"""
import logging
+import os
+import warnings
from datetime import datetime, timedelta
-import numpy as np
import dask.array as da
+import numpy as np
import xarray as xr
-import warnings
-import os
from satpy import CHUNK_SIZE
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy.readers.utils import unzip_file, get_geostationary_mask, \
- np2str, get_earth_radius, \
- get_user_calibration_factors, \
- apply_rad_correction
-from satpy.readers._geos_area import get_area_extent, get_area_definition
from satpy._compat import cached_property
+from satpy.readers._geos_area import get_area_definition, get_area_extent
+from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.utils import (
+ apply_rad_correction,
+ get_earth_radius,
+ get_geostationary_mask,
+ get_user_calibration_factors,
+ np2str,
+ unzip_file,
+)
AHI_CHANNEL_NAMES = ("1", "2", "3", "4", "5",
"6", "7", "8", "9", "10",
@@ -200,13 +230,25 @@
("numof_correction_info_data", " 23:
+ return False
+ return True
+
+ def _modify_observation_time_for_nominal(self, observation_time):
+ """Round observation time to a nominal time based on known observation frequency.
+
+ AHI observations are split into different sectors including Full Disk
+ (FLDK), Japan (JP) sectors, and smaller regional (R) sectors. Each
+ sector is observed at different frequencies (ex. every 10 minutes,
+ every 2.5 minutes, and every 30 seconds). This method will take the
+ actual observation time and round it to the nearest interval for this
+ sector. So if the observation time is 13:32:48 for the "JP02" sector,
+ which is the second Japan observation and Japan observations are
+ 2.5 minutes apart, then the result should be 13:32:30.
+
+ """
timeline = "{:04d}".format(self.basic_info['observation_timeline'][0])
+ if not self._is_valid_timeline(timeline):
+ warnings.warn("Observation timeline is fill value, not rounding observation time.")
+ return observation_time
+
if self.observation_area == 'FLDK':
dt = 0
else:
- observation_freq = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]]
- dt = observation_freq * (int(self.observation_area[2:]) - 1)
- return self.start_time.replace(hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60,
- second=dt % 60, microsecond=0)
+ observation_frequency_seconds = {'JP': 150, 'R3': 150, 'R4': 30, 'R5': 30}[self.observation_area[:2]]
+ dt = observation_frequency_seconds * (int(self.observation_area[2:]) - 1)
+
+ return observation_time.replace(
+ hour=int(timeline[:2]), minute=int(timeline[2:4]) + dt//60,
+ second=dt % 60, microsecond=0)
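Worked through for the docstring example (sector "JP02", timeline "1330")::

    timeline = "1330"               # scheduled timeline from the header
    freq_seconds = 150              # JP sectors repeat every 2.5 minutes
    dt = freq_seconds * (2 - 1)     # JP02 is the second Japan sector
    # hour 13, minute 30 + 150 // 60 = 32, second 150 % 60 = 30
    # -> nominal time 13:32:30, matching the example above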
def get_dataset(self, key, info):
"""Get the dataset."""
@@ -421,7 +512,7 @@ def _get_area_def(self):
def _check_fpos(self, fp_, fpos, offset, block):
"""Check file position matches blocksize."""
if fp_.tell() + offset != fpos:
- warnings.warn("Actual "+block+" header size does not match expected")
+ warnings.warn(f"Actual {block} header size does not match expected")
return
def _read_header(self, fp_):
@@ -476,31 +567,22 @@ def _read_header(self, fp_):
fp_, dtype=_NAVIGATION_CORRECTION_INFO_TYPE, count=1)
# 8 The navigation corrections:
ncorrs = header["block8"]['numof_correction_info_data'][0]
- dtype = np.dtype([
- ("line_number_after_rotation", "Refl/BT conversion."""
- import tempfile
- import shutil
import pathlib
+ import shutil
+ import tempfile
# Check that the LUT directory exists
pathlib.Path(self.lut_dir).mkdir(parents=True, exist_ok=True)
@@ -180,7 +178,7 @@ def _get_luts(self):
# The file is tarred, untar and remove the downloaded file
self._untar_luts(fname, tempdir)
- lut_dl_dir = os.path.join(tempdir, 'count2tbb/')
+ lut_dl_dir = os.path.join(tempdir, 'count2tbb_v102/')
# Loop over the LUTs and copy to the correct location
for lutfile in AHI_LUT_NAMES:
@@ -249,12 +247,9 @@ def calibrate(self, data, calib):
"""Calibrate the data."""
if calib == 'counts':
return data
- elif calib == 'reflectance' or calib == 'brightness_temperature':
- data = self._calibrate(data)
- else:
- raise NotImplementedError("ERROR: Unsupported calibration.",
- "Only counts, reflectance and ",
- "brightness_temperature calibration",
- "are supported.")
-
- return data
+ if calib == 'reflectance' or calib == 'brightness_temperature':
+ return self._calibrate(data)
+ raise NotImplementedError("ERROR: Unsupported calibration.",
+ "Only counts, reflectance and ",
+ "brightness_temperature calibration",
+ "are supported.")
diff --git a/satpy/readers/ami_l1b.py b/satpy/readers/ami_l1b.py
index a2de3c79f3..2df90a7d98 100644
--- a/satpy/readers/ami_l1b.py
+++ b/satpy/readers/ami_l1b.py
@@ -20,16 +20,16 @@
import logging
from datetime import datetime, timedelta
-import numpy as np
-import xarray as xr
import dask.array as da
+import numpy as np
import pyproj
+import xarray as xr
+from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
-from satpy.readers.utils import get_user_calibration_factors, apply_rad_correction
+from satpy import CHUNK_SIZE
from satpy.readers._geos_area import get_area_definition, get_area_extent
-from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
from satpy.readers.file_handlers import BaseFileHandler
-from satpy import CHUNK_SIZE
+from satpy.readers.utils import apply_rad_correction, get_user_calibration_factors
logger = logging.getLogger(__name__)
diff --git a/satpy/readers/amsr2_l1b.py b/satpy/readers/amsr2_l1b.py
index 1f40014d9b..bd3a35c05d 100644
--- a/satpy/readers/amsr2_l1b.py
+++ b/satpy/readers/amsr2_l1b.py
@@ -32,7 +32,7 @@ def get_metadata(self, ds_id, ds_info):
"shape": self.get_shape(ds_id, ds_info),
"units": self[var_path + "/attr/UNIT"],
"platform_name": self["/attr/PlatformShortName"],
- "sensor": self["/attr/SensorShortName"],
+ "sensor": self["/attr/SensorShortName"].lower(),
"start_orbit": int(self["/attr/StartOrbitNumber"]),
"end_orbit": int(self["/attr/StopOrbitNumber"]),
})
diff --git a/satpy/readers/amsr2_l2_gaasp.py b/satpy/readers/amsr2_l2_gaasp.py
index f8e603435b..dad9bcdfb5 100644
--- a/satpy/readers/amsr2_l2_gaasp.py
+++ b/satpy/readers/amsr2_l2_gaasp.py
@@ -38,15 +38,16 @@
import logging
from datetime import datetime
+from typing import Tuple
import numpy as np
import xarray as xr
from pyproj import CRS
from pyresample.geometry import AreaDefinition
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy._compat import cached_property
from satpy import CHUNK_SIZE
+from satpy._compat import cached_property
+from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
@@ -54,10 +55,10 @@
class GAASPFileHandler(BaseFileHandler):
"""Generic file handler for GAASP output files."""
- y_dims = (
+ y_dims: Tuple[str, ...] = (
'Number_of_Scans',
)
- x_dims = (
+ x_dims: Tuple[str, ...] = (
'Number_of_hi_rez_FOVs',
'Number_of_low_rez_FOVs',
)
@@ -134,9 +135,9 @@ def _nan_for_dtype(data_arr_dtype):
# if we don't have to
if data_arr_dtype.type == np.float32:
return np.float32(np.nan)
- elif np.issubdtype(data_arr_dtype, np.timedelta64):
+ if np.issubdtype(data_arr_dtype, np.timedelta64):
return np.timedelta64('NaT')
- elif np.issubdtype(data_arr_dtype, np.datetime64):
+ if np.issubdtype(data_arr_dtype, np.datetime64):
return np.datetime64('NaT')
return np.nan
diff --git a/satpy/readers/ascat_l2_soilmoisture_bufr.py b/satpy/readers/ascat_l2_soilmoisture_bufr.py
index 980570b5fe..b3e234f3e8 100644
--- a/satpy/readers/ascat_l2_soilmoisture_bufr.py
+++ b/satpy/readers/ascat_l2_soilmoisture_bufr.py
@@ -23,9 +23,10 @@
import logging
from datetime import datetime
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
try:
import eccodes as ec
@@ -34,8 +35,8 @@
"""Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes.
Error: """, e)
-from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger('AscatSoilMoistureBufr')
diff --git a/satpy/readers/atms_l1b_nc.py b/satpy/readers/atms_l1b_nc.py
new file mode 100644
index 0000000000..1ea61fe92c
--- /dev/null
+++ b/satpy/readers/atms_l1b_nc.py
@@ -0,0 +1,121 @@
+# Copyright (c) 2022 Satpy developers
+#
+# satpy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# satpy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Advanced Technology Microwave Sounder (ATMS) Level 1B product reader.
+
+The format is explained in the `ATMS L1B Product User Guide`_
+
+.. _`ATMS L1B Product User Guide`:
+ https://docserver.gesdisc.eosdis.nasa.gov/public/project/Sounder/ATMS_V3_L1B_Product_User_Guide.pdf
+
+"""
+
+import logging
+from datetime import datetime
+
+from satpy.readers.netcdf_utils import NetCDF4FileHandler
+
+logger = logging.getLogger(__name__)
+
+DATE_FMT = '%Y-%m-%dT%H:%M:%SZ'
+
+
+class AtmsL1bNCFileHandler(NetCDF4FileHandler):
+ """Reader class for ATMS L1B products in netCDF format."""
+
+ def __init__(self, filename, filename_info, filetype_info, **kwargs):
+ """Initialize file handler."""
+ super().__init__(
+ filename, filename_info, filetype_info, auto_maskandscale=True,
+ )
+
+ @property
+ def start_time(self):
+ """Get observation start time."""
+ return datetime.strptime(self['/attr/time_coverage_start'], DATE_FMT)
+
+ @property
+ def end_time(self):
+ """Get observation end time."""
+ return datetime.strptime(self['/attr/time_coverage_end'], DATE_FMT)
+
+ @property
+ def platform_name(self):
+ """Get platform name."""
+ return self["/attr/platform"]
+
+ @property
+ def sensor(self):
+ """Get sensor."""
+ return self["/attr/instrument"]
+
+ @property
+ def antenna_temperature(self):
+ """Get antenna temperature."""
+ file_key = self.filetype_info["antenna_temperature"]
+ return self[file_key]
+
+ @property
+ def attrs(self):
+ """Return attributes."""
+ return {
+ "filename": self.filename,
+ "start_time": self.start_time,
+ "end_time": self.end_time,
+ "platform_name": self.platform_name,
+ "sensor": self.sensor,
+ }
+
+ @staticmethod
+ def _standardize_dims(dataset):
+ """Standardize dims to y, x."""
+ if "atrack" in dataset.dims:
+ dataset = dataset.rename({"atrack": "y"})
+ if "xtrack" in dataset.dims:
+ dataset = dataset.rename({"xtrack": "x"})
+ if dataset.dims[0] == "x":
+ dataset = dataset.transpose("y", "x")
+ return dataset
+
+ @staticmethod
+ def _drop_coords(dataset):
+ """Drop coords that are not in dims."""
+ for coord in dataset.coords:
+ if coord not in dataset.dims:
+ dataset = dataset.drop_vars(coord)
+ return dataset
+
+ def _merge_attributes(self, dataset, dataset_info):
+ """Merge attributes of the dataset."""
+ dataset.attrs.update(self.filename_info)
+ dataset.attrs.update(dataset_info)
+ dataset.attrs.update(self.attrs)
+ return dataset
+
+ def _select_dataset(self, name):
+ """Select dataset."""
+ try:
+ idx = int(name) - 1
+ return self.antenna_temperature[:, :, idx]
+ except ValueError:
+ return self[name]
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Get dataset."""
+ name = dataset_id['name']
+ logger.debug(f'Reading in file to get dataset with name {name}.')
+ dataset = self._select_dataset(name)
+ dataset = self._merge_attributes(dataset, ds_info)
+ dataset = self._drop_coords(dataset)
+ return self._standardize_dims(dataset)
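A hedged usage sketch; the reader name matches the handler's YAML and the granule name is hypothetical::

    from satpy import Scene

    scn = Scene(filenames=["SNDR.J1.ATMS.20220407T1200.m06.g120.L1B.std.v3.nc"],
                reader="atms_l1b_nc")
    scn.load(["1", "16"])   # antenna temperatures for channels 1 and 16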
diff --git a/satpy/readers/avhrr_l1b_gaclac.py b/satpy/readers/avhrr_l1b_gaclac.py
index 3cf7f10760..449eb97136 100644
--- a/satpy/readers/avhrr_l1b_gaclac.py
+++ b/satpy/readers/avhrr_l1b_gaclac.py
@@ -29,13 +29,13 @@
import dask.array as da
import numpy as np
-
import pygac.utils
import xarray as xr
from pygac.gac_klm import GACKLMReader
from pygac.gac_pod import GACPODReader
from pygac.lac_klm import LACKLMReader
from pygac.lac_pod import LACPODReader
+
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
diff --git a/satpy/readers/caliop_l2_cloud.py b/satpy/readers/caliop_l2_cloud.py
index b8e9baae72..0fc89ae548 100644
--- a/satpy/readers/caliop_l2_cloud.py
+++ b/satpy/readers/caliop_l2_cloud.py
@@ -15,13 +15,14 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
+# type: ignore
"""Interface to CALIOP L2 HDF4 cloud products."""
import logging
import os.path
import re
-
from datetime import datetime
+
from pyhdf.SD import SD, SDC
from satpy.dataset import Dataset
diff --git a/satpy/readers/clavrx.py b/satpy/readers/clavrx.py
index 78d4e3e05c..d9a8dee055 100644
--- a/satpy/readers/clavrx.py
+++ b/satpy/readers/clavrx.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2017 Satpy developers
+# Copyright (c) 2021 Satpy developers
#
# This file is part of satpy.
#
@@ -17,14 +17,21 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface to CLAVR-X HDF4 products."""
-import os
import logging
-import numpy as np
-import netCDF4
+import os
from glob import glob
-from satpy.readers.hdf4_utils import HDF4FileHandler, SDS
+from pathlib import Path
+from typing import Optional
+
+import netCDF4
+import numpy as np
+import xarray as xr
from pyresample import geometry
+from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.hdf4_utils import SDS, HDF4FileHandler
+
LOG = logging.getLogger(__name__)
@@ -32,181 +39,112 @@
'none': '1',
}
+SENSORS = {
+ 'MODIS': 'modis',
+ 'VIIRS': 'viirs',
+ 'AVHRR': 'avhrr',
+ 'AHI': 'ahi',
+ 'ABI': 'abi',
+}
+PLATFORMS = {
+ 'SNPP': 'npp',
+ 'HIM8': 'himawari8',
+ 'HIM9': 'himawari9',
+ 'H08': 'himawari8',
+ 'H09': 'himawari9',
+ 'G16': 'GOES-16',
+ 'G17': 'GOES-17'
+}
+ROWS_PER_SCAN = {
+ 'viirs': 16,
+ 'modis': 10,
+}
+NADIR_RESOLUTION = {
+ 'viirs': 742,
+ 'modis': 1000,
+ 'avhrr': 1050,
+ 'ahi': 2000,
+ 'abi': 2004,
+}
-class CLAVRXFileHandler(HDF4FileHandler):
- """A file handler for CLAVRx files."""
- sensors = {
- 'MODIS': 'modis',
- 'VIIRS': 'viirs',
- 'AVHRR': 'avhrr',
- 'AHI': 'ahi',
- # 'ABI': 'abi',
- }
- platforms = {
- 'SNPP': 'npp',
- 'HIM8': 'himawari8',
- 'HIM9': 'himawari9',
- 'H08': 'himawari8',
- 'H09': 'himawari9',
- # 'G16': 'GOES-16',
- # 'G17': 'GOES-17'
- }
- rows_per_scan = {
- 'viirs': 16,
- 'modis': 10,
- }
- nadir_resolution = {
- 'viirs': 742,
- 'modis': 1000,
- 'avhrr': 1050,
- 'ahi': 2000,
- # 'abi': 2004,
- }
-
- def get_sensor(self, sensor):
- """Get the sensor."""
- for k, v in self.sensors.items():
- if k in sensor:
- return v
- raise ValueError("Unknown sensor '{}'".format(sensor))
+def _get_sensor(sensor: str) -> str:
+ """Get the sensor."""
+ for k, v in SENSORS.items():
+ if k in sensor:
+ return v
+ raise ValueError("Unknown sensor '{}'".format(sensor))
- def get_platform(self, platform):
- """Get the platform."""
- for k, v in self.platforms.items():
- if k in platform:
- return v
- return platform
- def get_rows_per_scan(self, sensor):
- """Get number of rows per scan."""
- for k, v in self.rows_per_scan.items():
- if sensor.startswith(k):
- return v
+def _get_platform(platform: str) -> str:
+ """Get the platform."""
+ for k, v in PLATFORMS.items():
+ if k in platform:
+ return v
+ return platform
- def get_nadir_resolution(self, sensor):
- """Get nadir resolution."""
- for k, v in self.nadir_resolution.items():
- if sensor.startswith(k):
- return v
- res = self.filename_info.get('resolution')
- if res.endswith('m'):
- return int(res[:-1])
- elif res is not None:
- return int(res)
- @property
- def start_time(self):
- """Get the start time."""
- return self.filename_info['start_time']
+def _get_rows_per_scan(sensor: str) -> Optional[int]:
+ """Get number of rows per scan."""
+ for k, v in ROWS_PER_SCAN.items():
+ if sensor.startswith(k):
+ return v
+ return None
- @property
- def end_time(self):
- """Get the end time."""
- return self.filename_info.get('end_time', self.start_time)
- def available_datasets(self, configured_datasets=None):
- """Automatically determine datasets provided by this file."""
- sensor = self.get_sensor(self['/attr/sensor'])
- nadir_resolution = self.get_nadir_resolution(sensor)
- coordinates = ('longitude', 'latitude')
- handled_variables = set()
+def _remove_attributes(attrs: dict) -> dict:
+ """Remove attributes that described data before scaling."""
+ old_attrs = ['unscaled_missing', 'SCALED_MIN', 'SCALED_MAX',
+ 'SCALED_MISSING']
- # update previously configured datasets
- for is_avail, ds_info in (configured_datasets or []):
- this_res = ds_info.get('resolution')
- this_coords = ds_info.get('coordinates')
- # some other file handler knows how to load this
- if is_avail is not None:
- yield is_avail, ds_info
+ for attr_key in old_attrs:
+ attrs.pop(attr_key, None)
+ return attrs
- var_name = ds_info.get('file_key', ds_info['name'])
- matches = self.file_type_matches(ds_info['file_type'])
- # we can confidently say that we can provide this dataset and can
- # provide more info
- if matches and var_name in self and this_res != nadir_resolution:
- handled_variables.add(var_name)
- new_info = ds_info.copy() # don't mess up the above yielded
- new_info['resolution'] = nadir_resolution
- if self._is_polar() and this_coords is None:
- new_info['coordinates'] = coordinates
- yield True, new_info
- elif is_avail is None:
- # if we didn't know how to handle this dataset and no one else did
- # then we should keep it going down the chain
- yield is_avail, ds_info
-
- # add new datasets
- for var_name, val in self.file_content.items():
- if isinstance(val, SDS):
- ds_info = {
- 'file_type': self.filetype_info['file_type'],
- 'resolution': nadir_resolution,
- 'name': var_name,
- }
- if self._is_polar():
- ds_info['coordinates'] = ['longitude', 'latitude']
- yield True, ds_info
-
- def get_shape(self, dataset_id, ds_info):
- """Get the shape."""
- var_name = ds_info.get('file_key', dataset_id['name'])
- return self[var_name + '/shape']
- def get_metadata(self, data_arr, ds_info):
- """Get metadata."""
- i = {}
- i.update(data_arr.attrs)
- i.update(ds_info)
-
- flag_meanings = i.get('flag_meanings')
- if not i.get('SCALED', 1) and not flag_meanings:
- i['flag_meanings'] = ''
- i.setdefault('flag_values', [None])
+class _CLAVRxHelper:
+ """A base class for the CLAVRx File Handlers."""
- u = i.get('units')
- if u in CF_UNITS:
- # CF compliance
- i['units'] = CF_UNITS[u]
-
- i['sensor'] = sensor = self.get_sensor(self['/attr/sensor'])
- platform = self.get_platform(self['/attr/platform'])
- i['platform'] = i['platform_name'] = platform
- i['resolution'] = i.get('resolution') or self.get_nadir_resolution(i['sensor'])
- rps = self.get_rows_per_scan(sensor)
- if rps:
- i['rows_per_scan'] = rps
- i['reader'] = 'clavrx'
-
- return i
+ @staticmethod
+ def _scale_data(data_arr: xr.DataArray, scale_factor: float, add_offset: float) -> xr.DataArray:
+ """Scale data, if needed."""
+ scaling_needed = not (scale_factor == 1.0 and add_offset == 0.0)
+ if scaling_needed:
+ data_arr = data_arr * scale_factor + add_offset
+ return data_arr
- def get_dataset(self, dataset_id, ds_info):
+ @staticmethod
+ def _get_data(data: xr.DataArray, dataset_id: dict) -> xr.DataArray:
"""Get a dataset."""
- var_name = ds_info.get('file_key', dataset_id['name'])
- data = self[var_name]
- if dataset_id['resolution']:
+ if dataset_id.get('resolution'):
data.attrs['resolution'] = dataset_id['resolution']
- data.attrs = self.get_metadata(data, ds_info)
- fill = data.attrs.pop('_FillValue', None)
- factor = data.attrs.pop('scale_factor', None)
- offset = data.attrs.pop('add_offset', None)
- valid_range = data.attrs.pop('valid_range', None)
-
- if factor is not None and offset is not None:
- def scale_inplace(data):
- data *= factor
- data += offset
- return data
- else:
- def scale_inplace(data):
- return data
-
- data = data.where(data != fill)
- scale_inplace(data)
- if valid_range is not None:
- valid_min, valid_max = scale_inplace(valid_range[0]), scale_inplace(valid_range[1])
- data = data.where((data >= valid_min) & (data <= valid_max))
- data.attrs['valid_min'], data.attrs['valid_max'] = valid_min, valid_max
+
+ attrs = data.attrs.copy()
+
+ fill = attrs.get('_FillValue')
+ factor = attrs.pop('scale_factor', (np.ones(1, dtype=data.dtype))[0])
+ offset = attrs.pop('add_offset', (np.zeros(1, dtype=data.dtype))[0])
+ valid_range = attrs.get('valid_range', [None])
+ if isinstance(valid_range, np.ndarray):
+ attrs["valid_range"] = valid_range.tolist()
+
+ flags = not data.attrs.get("SCALED", 1) and any(data.attrs.get("flag_values", [None]))
+ if not flags:
+ data = data.where(data != fill)
+ data = _CLAVRxHelper._scale_data(data, factor, offset)
+ # don't need _FillValue if it has been applied.
+ attrs.pop('_FillValue', None)
+
+ if all(valid_range):
+ valid_min = _CLAVRxHelper._scale_data(valid_range[0], factor, offset)
+ valid_max = _CLAVRxHelper._scale_data(valid_range[1], factor, offset)
+ if flags:
+ data = data.where((data >= valid_min) & (data <= valid_max), fill)
+ else:
+ data = data.where((data >= valid_min) & (data <= valid_max))
+ attrs['valid_range'] = [valid_min, valid_max]
+
+ data.attrs = _remove_attributes(attrs)
return data
@@ -224,7 +162,7 @@ def _area_extent(x, y, h):
return area_extent, ncols, nlines
@staticmethod
- def _read_pug_fixed_grid(projection, distance_multiplier=1.0):
+ def _read_pug_fixed_grid(projection, distance_multiplier=1.0) -> dict:
"""Read from recent PUG format, where axes are in meters."""
a = projection.semi_major_axis
h = projection.perspective_point_height
@@ -242,8 +180,14 @@ def _read_pug_fixed_grid(projection, distance_multiplier=1.0):
'sweep': sweep_axis}
return proj_dict
- def _find_input_nc(self, l1b_base):
- dirname = os.path.split(self.filename)[0]
+ @staticmethod
+ def _find_input_nc(filename: str, l1b_base: str) -> str:
+ file_path = Path(filename)
+ dirname = file_path.parent
+ l1b_filename = dirname.joinpath(l1b_base + '.nc')
+ if l1b_filename.exists():
+ return str(l1b_filename)
+
glob_pat = os.path.join(dirname, l1b_base + '*R20*.nc')
LOG.debug("searching for {0}".format(glob_pat))
l1b_filenames = list(glob(glob_pat))
@@ -253,7 +197,8 @@ def _find_input_nc(self, l1b_base):
LOG.debug('Candidate nav donors: {0}'.format(repr(l1b_filenames)))
return l1b_filenames[0]
- def _read_axi_fixed_grid(self, l1b_attr):
+ @staticmethod
+ def _read_axi_fixed_grid(filename: str, l1b_attr) -> geometry.AreaDefinition:
"""Read a fixed grid.
CLAVR-x does not transcribe fixed grid parameters to its output
@@ -268,7 +213,7 @@ def _read_axi_fixed_grid(self, l1b_attr):
"""
LOG.debug("looking for corresponding input file for {0}"
" to act as fixed grid navigation donor".format(l1b_attr))
- l1b_path = self._find_input_nc(l1b_attr)
+ l1b_path = _CLAVRxHelper._find_input_nc(filename, l1b_attr)
LOG.info("Since CLAVR-x does not include fixed-grid parameters,"
" using input file {0} as donor".format(l1b_path))
l1b = netCDF4.Dataset(l1b_path)
@@ -277,19 +222,19 @@ def _read_axi_fixed_grid(self, l1b_attr):
if proj_var is not None:
# hsd2nc input typically used by CLAVR-x uses old-form km for axes/height
LOG.debug("found hsd2nc-style draft PUG fixed grid specification")
- proj = self._read_pug_fixed_grid(proj_var, 1000.0)
+ proj = _CLAVRxHelper._read_pug_fixed_grid(proj_var, 1000.0)
if proj is None: # most likely to come into play for ABI cases
proj_var = l1b.variables.get("goes_imager_projection", None)
if proj_var is not None:
LOG.debug("found cmip-style final PUG fixed grid specification")
- proj = self._read_pug_fixed_grid(proj_var)
+ proj = _CLAVRxHelper._read_pug_fixed_grid(proj_var)
if not proj:
raise ValueError("Unable to recover projection information"
- " for {0}".format(self.filename))
+ " for {0}".format(filename))
h = float(proj['h'])
x, y = l1b['x'], l1b['y']
- area_extent, ncols, nlines = self._area_extent(x, y, h)
+ area_extent, ncols, nlines = _CLAVRxHelper._area_extent(x, y, h)
# LOG.debug(repr(proj))
# LOG.debug(repr(area_extent))
@@ -305,16 +250,238 @@ def _read_axi_fixed_grid(self, l1b_attr):
return area
+ @staticmethod
+ def get_metadata(sensor, platform, attrs: dict, ds_info: dict) -> dict:
+ """Get metadata."""
+ i = {}
+ i.update(attrs)
+ i.update(ds_info)
+
+ flag_meanings = i.get('flag_meanings', None)
+ if not i.get('SCALED', 1) and not flag_meanings:
+ i['flag_meanings'] = ''
+ i.setdefault('flag_values', [None])
+ u = i.get('units')
+ if u in CF_UNITS:
+ # CF compliance
+ i['units'] = CF_UNITS[u]
+ if u is not None and u.lower() == "none":
+ i['units'] = "1"
+ i['sensor'] = sensor
+ i['platform_name'] = platform
+ rps = _get_rows_per_scan(sensor)
+ if rps:
+ i['rows_per_scan'] = rps
+ i['reader'] = 'clavrx'
+
+ return i
+
+
+class CLAVRXHDF4FileHandler(HDF4FileHandler, _CLAVRxHelper):
+ """A file handler for CLAVRx files."""
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Init method."""
+ super(CLAVRXHDF4FileHandler, self).__init__(filename,
+ filename_info,
+ filetype_info)
+
+ @property
+ def start_time(self):
+ """Get the start time."""
+ return self.filename_info['start_time']
+
+ @property
+ def end_time(self):
+ """Get the end time."""
+ return self.filename_info.get('end_time', self.start_time)
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Get a dataset."""
+ var_name = ds_info.get('file_key', dataset_id['name'])
+ data = self[var_name]
+ data = _CLAVRxHelper._get_data(data, dataset_id)
+ data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform,
+ data.attrs, ds_info)
+ return data
+
+ def get_nadir_resolution(self, sensor):
+ """Get nadir resolution."""
+ for k, v in NADIR_RESOLUTION.items():
+ if sensor.startswith(k):
+ return v
+ res = self.filename_info.get('resolution')
+ if res is None:
+ return None
+ if res.endswith('m'):
+ return int(res[:-1])
+ return int(res)
+
+ def available_datasets(self, configured_datasets=None):
+ """Automatically determine datasets provided by this file."""
+ self.sensor = _get_sensor(self.file_content.get('/attr/sensor'))
+ self.platform = _get_platform(self.file_content.get('/attr/platform'))
+
+ nadir_resolution = self.get_nadir_resolution(self.sensor)
+ coordinates = ('longitude', 'latitude')
+ handled_variables = set()
+
+ # update previously configured datasets
+ for is_avail, ds_info in (configured_datasets or []):
+ this_res = ds_info.get('resolution')
+ this_coords = ds_info.get('coordinates')
+ # some other file handler knows how to load this
+ if is_avail is not None:
+ yield is_avail, ds_info
+
+ var_name = ds_info.get('file_key', ds_info['name'])
+ matches = self.file_type_matches(ds_info['file_type'])
+ # we can confidently say that we can provide this dataset and can
+ # provide more info
+ if matches and var_name in self and this_res != nadir_resolution:
+ handled_variables.add(var_name)
+ new_info = ds_info.copy() # don't mess up the above yielded
+ new_info['resolution'] = nadir_resolution
+ if self._is_polar() and this_coords is None:
+ new_info['coordinates'] = coordinates
+ yield True, new_info
+ elif is_avail is None:
+ # if we didn't know how to handle this dataset and no one else did
+ # then we should keep it going down the chain
+ yield is_avail, ds_info
+
+ # add new datasets
+ for var_name, val in self.file_content.items():
+ if isinstance(val, SDS):
+ ds_info = {
+ 'file_type': self.filetype_info['file_type'],
+ 'resolution': nadir_resolution,
+ 'name': var_name,
+ }
+ if self._is_polar():
+ ds_info['coordinates'] = ['longitude', 'latitude']
+ yield True, ds_info
+
+ def get_shape(self, dataset_id, ds_info):
+ """Get the shape."""
+ var_name = ds_info.get('file_key', dataset_id['name'])
+ return self[var_name + '/shape']
+
def _is_polar(self):
l1b_att, inst_att = (str(self.file_content.get('/attr/L1B', None)),
str(self.file_content.get('/attr/sensor', None)))
- return (inst_att != 'AHI') or (l1b_att is None)
+ return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None)
def get_area_def(self, key):
"""Get the area definition of the data at hand."""
if self._is_polar(): # then it doesn't have a fixed grid
- return super(CLAVRXFileHandler, self).get_area_def(key)
+ return super(CLAVRXHDF4FileHandler, self).get_area_def(key)
l1b_att = str(self.file_content.get('/attr/L1B', None))
- return self._read_axi_fixed_grid(l1b_att)
+ area_def = _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att)
+ return area_def
+
+
+class CLAVRXNetCDFFileHandler(_CLAVRxHelper, BaseFileHandler):
+ """File Handler for CLAVRX netcdf files."""
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Init method."""
+ super(CLAVRXNetCDFFileHandler, self).__init__(filename,
+ filename_info,
+ filetype_info,
+ )
+
+ self.nc = xr.open_dataset(filename,
+ decode_cf=True,
+ mask_and_scale=False,
+ decode_coords=True,
+ chunks=CHUNK_SIZE)
+ # satpy expects 'y'/'x' dimension names; xrimage uses 'bands' rather than 'channel'
+ self.nc = self.nc.rename_dims({'scan_lines_along_track_direction': "y",
+ 'pixel_elements_along_scan_direction': "x"})
+
+ self.platform = _get_platform(
+ self.filename_info.get('platform_shortname', None))
+ self.sensor = self.nc.attrs.get('sensor', None)
+ # coordinates need scaling and valid_range (mask_and_scale won't work on valid_range)
+ self.nc.coords["latitude"] = _CLAVRxHelper._get_data(self.nc.coords["latitude"],
+ {"name": "latitude"})
+ self.nc.coords["longitude"] = _CLAVRxHelper._get_data(self.nc.coords["longitude"],
+ {"name": "longitude"})
+
+ def _get_ds_info_for_data_arr(self, var_name):
+ ds_info = {
+ 'file_type': self.filetype_info['file_type'],
+ 'name': var_name,
+ }
+ return ds_info
+
+ def _is_2d_yx_data_array(self, data_arr):
+ has_y_dim = data_arr.dims[0] == "y"
+ has_x_dim = data_arr.dims[1] == "x"
+ return has_y_dim and has_x_dim
+
+ def _available_new_datasets(self, handled_vars):
+ """Metadata for available variables other than BT."""
+ possible_vars = list(self.nc.items()) + list(self.nc.coords.items())
+ for var_name, data_arr in possible_vars:
+ if var_name in handled_vars:
+ continue
+ if data_arr.ndim != 2:
+ # we don't currently handle non-2D variables
+ continue
+ if not self._is_2d_yx_data_array(data_arr):
+ # we need 'traditional' y/x dimensions currently
+ continue
+
+ ds_info = self._get_ds_info_for_data_arr(var_name)
+ yield True, ds_info
+
+ def available_datasets(self, configured_datasets=None):
+ """Dynamically discover what variables can be loaded from this file.
+
+ See :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets`
+ for more information.
+
+ """
+ handled_vars = set()
+ for is_avail, ds_info in (configured_datasets or []):
+ if is_avail is not None:
+ # some other file handler said it has this dataset
+ # we don't know any more information than the previous
+ # file handler so let's yield early
+ yield is_avail, ds_info
+ continue
+ if self.file_type_matches(ds_info['file_type']):
+ handled_vars.add(ds_info['name'])
+ yield self.file_type_matches(ds_info['file_type']), ds_info
+ yield from self._available_new_datasets(handled_vars)
+
+ def _is_polar(self):
+ l1b_att, inst_att = (str(self.nc.attrs.get('L1B', None)),
+ str(self.nc.attrs.get('sensor', None)))
+
+ return (inst_att != 'AHI' and 'GOES' not in inst_att) or (l1b_att is None)
+
+ def get_area_def(self, key):
+ """Get the area definition of the data at hand."""
+ if self._is_polar(): # then it doesn't have a fixed grid
+ return super(CLAVRXNetCDFFileHandler, self).get_area_def(key)
+
+ l1b_att = str(self.nc.attrs.get('L1B', None))
+ return _CLAVRxHelper._read_axi_fixed_grid(self.filename, l1b_att)
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Get a dataset."""
+ var_name = ds_info.get('name', dataset_id['name'])
+ data = self[var_name]
+ data = _CLAVRxHelper._get_data(data, dataset_id)
+ data.attrs = _CLAVRxHelper.get_metadata(self.sensor, self.platform,
+ data.attrs, ds_info)
+ return data
+
+ def __getitem__(self, item):
+ """Wrap around `self.nc[item]`."""
+ data = self.nc[item]
+ return data
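A minimal sketch of the unscale-and-mask flow that `_CLAVRxHelper._get_data` applies to non-flag variables (the packed values and CF attributes below are invented for illustration):

import numpy as np
import xarray as xr

data = xr.DataArray(
    np.array([[-128, 0, 50], [100, 127, -128]], dtype=np.int16),
    dims=("y", "x"),
    attrs={"_FillValue": -128, "scale_factor": 0.5, "add_offset": 10.0,
           "valid_range": [0, 127]})

attrs = data.attrs.copy()
fill = attrs.pop("_FillValue")
factor = attrs.pop("scale_factor")
offset = attrs.pop("add_offset")
valid_min, valid_max = [v * factor + offset for v in attrs.pop("valid_range")]

data = data.where(data != fill)  # fill value -> NaN
data = data * factor + offset    # unpack to physical units
data = data.where((data >= valid_min) & (data <= valid_max))
data.attrs = dict(attrs, valid_range=[valid_min, valid_max])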
diff --git a/satpy/readers/cmsaf_claas2.py b/satpy/readers/cmsaf_claas2.py
index 70f10e2e39..f8f360623e 100644
--- a/satpy/readers/cmsaf_claas2.py
+++ b/satpy/readers/cmsaf_claas2.py
@@ -1,13 +1,36 @@
"""Module containing CMSAF CLAAS v2 FileHandler."""
import datetime
-import pyresample.geometry
+
+from satpy.resample import get_area_def
+
from .netcdf_utils import NetCDF4FileHandler
+def _is_georef_offset_present(date):
+ # Reference: Product User Manual, section 3.
+ # https://doi.org/10.5676/EUM_SAF_CM/CLAAS/V002_01
+ return date < datetime.date(2017, 12, 6)
+
+
+def _adjust_area_to_match_shifted_data(area):
+ # Reference:
+ # https://github.com/pytroll/satpy/wiki/SEVIRI-georeferencing-offset-correction
+ offset = area.pixel_size_x / 2
+ llx, lly, urx, ury = area.area_extent
+ new_extent = [llx + offset, lly - offset, urx + offset, ury - offset]
+ return area.copy(area_extent=new_extent)
+
+
+FULL_DISK = get_area_def("msg_seviri_fes_3km")
+FULL_DISK_WITH_OFFSET = _adjust_area_to_match_shifted_data(FULL_DISK)
+
+
class CLAAS2(NetCDF4FileHandler):
"""Handle CMSAF CLAAS-2 files."""
+ grid_size = 3636
+
def __init__(self, *args, **kwargs):
"""Initialise class."""
super().__init__(*args, **kwargs, cache_handle=False,
@@ -67,16 +90,24 @@ def get_dataset(self, dataset_id, info):
ds = self[dataset_id['name']]
if "time" in ds.dims:
return ds.squeeze(["time"])
- else:
- return ds
+
+ return ds
def get_area_def(self, dataset_id):
"""Get the area definition."""
- return pyresample.geometry.AreaDefinition(
- "some_area_name",
- "on-the-fly area",
- "geos",
- self["/attr/CMSAF_proj4_params"],
- self["/dimension/x"],
- self["/dimension/y"],
- self["/attr/CMSAF_area_extent"])
+ return self._get_subset_of_full_disk()
+
+ def _get_subset_of_full_disk(self):
+ """Get subset of the full disk.
+
+ CLAAS products are provided on a grid that is slightly smaller
+ than the full disk (excludes most of the space pixels).
+ """
+ full_disk = self._get_full_disk()
+ offset = int((full_disk.width - self.grid_size) // 2)
+ return full_disk[offset:-offset, offset:-offset]
+
+ def _get_full_disk(self):
+ if _is_georef_offset_present(self.start_time.date()):
+ return FULL_DISK_WITH_OFFSET
+ return FULL_DISK
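Worked example of the half-pixel shift applied by `_adjust_area_to_match_shifted_data` (the extent and pixel size are invented, SEVIRI-like numbers; only the arithmetic mirrors the code above):

pixel_size_x = 3000.4  # metres
llx, lly, urx, ury = -5456233.0, -5456233.0, 5456233.0, 5456233.0

offset = pixel_size_x / 2
new_extent = [llx + offset, lly - offset, urx + offset, ury - offset]
# The grid moves half a pixel east and half a pixel south, matching CLAAS
# data recorded before the georeferencing fix of 2017-12-06.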
diff --git a/satpy/readers/electrol_hrit.py b/satpy/readers/electrol_hrit.py
index 3dab249c49..53e69d42b4 100644
--- a/satpy/readers/electrol_hrit.py
+++ b/satpy/readers/electrol_hrit.py
@@ -30,10 +30,15 @@
import numpy as np
import xarray as xr
-from satpy.readers._geos_area import get_area_extent, get_area_definition
-from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text,
- annotation_header, base_hdr_map,
- image_data_function, time_cds_short)
+from satpy.readers._geos_area import get_area_definition, get_area_extent
+from satpy.readers.hrit_base import (
+ HRITFileHandler,
+ ancillary_text,
+ annotation_header,
+ base_hdr_map,
+ image_data_function,
+ time_cds_short,
+)
logger = logging.getLogger('hrit_electrol')
@@ -156,7 +161,6 @@ def read_prologue(self):
def process_prologue(self):
"""Reprocess prologue to correct types."""
- pass
     radiometric_processing = np.dtype([("TagType", "<u4"),
diff --git a/satpy/readers/fci_l1c_fdhsi.py b/satpy/readers/fci_l1c_nc.py
--- a/satpy/readers/fci_l1c_fdhsi.py
+++ b/satpy/readers/fci_l1c_nc.py
-"""Interface to MTG-FCI-FDHSI L1C NetCDF files.
+"""Interface to MTG-FCI L1c NetCDF files.
-This module defines the :class:`FCIFDHSIFileHandler` file handler, to
+This module defines the :class:`FCIL1cNCFileHandler` file handler, to
be used for reading Meteosat Third Generation (MTG) Flexible Combined
-Imager (FCI) Full Disk High Spectral Imagery (FDHSI) data. FCI will fly
+Imager (FCI) Level-1c data. FCI will fly
on the MTG Imager (MTG-I) series of satellites, scheduled to be launched
in 2022 by the earliest. For more information about FCI, see `EUMETSAT`_.
For simulated test data to be used with this reader, see `test data release`_.
For the Product User Guide (PUG) of the FCI L1c data, see `PUG`_.
+.. note::
+ This reader currently supports Full Disk High Spectral Resolution Imagery
+ (FDHSI) files. Support for High Spatial Resolution Fast Imagery (HRFI) files
+ will be implemented when corresponding test datasets become available.
Geolocation is based on information from the data files. It uses:
@@ -55,12 +59,20 @@
``pyresample.geometry.AreaDefinition``, which then uses proj4 for the actual
geolocation calculations.
-The brightness temperature and reflectance calculation is based on the formulas indicated in
-`PUG`_.
The reading routine supports channel data in counts, radiances, and (depending
-on channel) brightness temperatures or reflectances. For each channel, it also
-supports the pixel quality, obtained by prepending the channel name such as
+on channel) brightness temperatures or reflectances. The brightness temperature and reflectance calculation is based on the formulas indicated in
+`PUG`_.
+Radiance datasets are returned in units of radiance per unit wavenumber (mW m-2 sr-1 (cm-1)-1). Radiances can be
+converted to units of radiance per unit wavelength (W m-2 um-1 sr-1) by multiplying with the
+`radiance_unit_conversion_coefficient` dataset attribute.
+
+For each channel, it also supports a number of auxiliary datasets, such as the pixel quality,
+the index map and the related geometric and acquisition parameters: time,
+subsatellite latitude, subsatellite longitude, platform altitude, subsolar latitude, subsolar longitude,
+earth-sun distance, sun-satellite distance, swath number, and swath direction.
+
+All auxiliary data can be obtained by prepending the channel name, for example
``"vis_04_pixel_quality"``.
.. warning::
@@ -70,20 +82,36 @@
``pixel_quality`` and disambiguated by a to-be-decided property in the
`DataID`.
+.. note::
+
+ For reading compressed data, a decompression library is
+ needed. Either install the FCIDECOMP library (see `PUG`_), or the
+ ``hdf5plugin`` package with::
+
+ pip install hdf5plugin
+
+ or::
+
+ conda install hdf5plugin -c conda-forge
+
+ If you use ``hdf5plugin``, make sure to add the line ``import hdf5plugin``
+ at the top of your script.
+
.. _PUG: https://www-cdn.eumetsat.int/files/2020-07/pdf_mtg_fci_l1_pug.pdf
.. _EUMETSAT: https://www.eumetsat.int/mtg-flexible-combined-imager # noqa: E501
.. _test data release: https://www.eumetsat.int/simulated-mtg-fci-l1c-enhanced-non-nominal-datasets
"""
-from __future__ import (division, absolute_import, print_function,
- unicode_literals)
+from __future__ import absolute_import, division, print_function, unicode_literals
import logging
+from functools import cached_property
+
import numpy as np
import xarray as xr
-
-from pyresample import geometry
from netCDF4 import default_fillvals
+from pyresample import geometry
+
from satpy.readers._geos_area import get_geos_area_naming
from satpy.readers.eum_base import get_service_mode
@@ -91,15 +119,52 @@
logger = logging.getLogger(__name__)
+# dict containing all available auxiliary data parameters to be read using the index map. Keys are the
+# parameter name and values are the paths to the variable inside the netcdf
+AUX_DATA = {
+ 'subsatellite_latitude': 'state/platform/subsatellite_latitude',
+ 'subsatellite_longitude': 'state/platform/subsatellite_longitude',
+ 'platform_altitude': 'state/platform/platform_altitude',
+ 'subsolar_latitude': 'state/celestial/subsolar_latitude',
+ 'subsolar_longitude': 'state/celestial/subsolar_longitude',
+ 'earth_sun_distance': 'state/celestial/earth_sun_distance',
+ 'sun_satellite_distance': 'state/celestial/sun_satellite_distance',
+ 'time': 'time',
+ 'swath_number': 'data/swath_number',
+ 'swath_direction': 'data/swath_direction',
+}
+
+
+def _get_aux_data_name_from_dsname(dsname):
+ aux_data_name = [key for key in AUX_DATA.keys() if key in dsname]
+ if len(aux_data_name) > 0:
+ return aux_data_name[0]
+
+ return None
+
+
+def _get_channel_name_from_dsname(dsname):
+ # FIXME: replace by .removesuffix after we drop support for Python < 3.9
+ if dsname.endswith("_pixel_quality"):
+ channel_name = dsname[:-len("_pixel_quality")]
+ elif dsname.endswith("_index_map"):
+ channel_name = dsname[:-len("_index_map")]
+ elif _get_aux_data_name_from_dsname(dsname) is not None:
+ channel_name = dsname[:-len(_get_aux_data_name_from_dsname(dsname)) - 1]
+ else:
+ channel_name = dsname
-class FCIFDHSIFileHandler(NetCDF4FileHandler):
- """Class implementing the MTG FCI FDHSI File .
+ return channel_name
+
+
+class FCIL1cNCFileHandler(NetCDF4FileHandler):
+ """Class implementing the MTG FCI L1c Filehandler.
This class implements the Meteosat Third Generation (MTG) Flexible
- Combined Imager (FCI) Full Disk High Spectral Imagery (FDHSI) reader.
+ Combined Imager (FCI) Level-1c NetCDF reader.
It is designed to be used through the :class:`~satpy.Scene`
class using the :mod:`~satpy.Scene.load` method with the reader
- ``"fci_l1c_fdhsi"``.
+ ``"fci_l1c_nc"``.
"""
@@ -112,17 +177,17 @@ class using the :mod:`~satpy.Scene.load` method with the reader
# numbering will be considering MTG-S1 and MTG-S2 will be launched
# in-between.
_platform_name_translate = {
- "MTI1": "MTG-I1",
- "MTI2": "MTG-I2",
- "MTI3": "MTG-I3",
- "MTI4": "MTG-I4"}
+ "MTI1": "MTG-I1",
+ "MTI2": "MTG-I2",
+ "MTI3": "MTG-I3",
+ "MTI4": "MTG-I4"}
def __init__(self, filename, filename_info, filetype_info):
"""Initialize file handler."""
- super(FCIFDHSIFileHandler, self).__init__(filename, filename_info,
- filetype_info,
- cache_var_size=10000,
- cache_handle=True)
+ super().__init__(filename, filename_info,
+ filetype_info,
+ cache_var_size=10000,
+ cache_handle=True)
logger.debug('Reading: {}'.format(self.filename))
logger.debug('Start: {}'.format(self.start_time))
logger.debug('End: {}'.format(self.end_time))
@@ -139,15 +204,39 @@ def end_time(self):
"""Get end time."""
return self.filename_info['end_time']
+ def get_segment_position_info(self):
+ """Get the vertical position and size information of the chunk (aka segment) for both 1km and 2km grids.
+
+ This is used in the GEOVariableSegmentYAMLReader to compute optimal chunk sizes for missing chunks.
+ """
+ segment_position_info = {
+ '1km': {'start_position_row': self['data/vis_04/measured/start_position_row'].item(),
+ 'end_position_row': self['data/vis_04/measured/end_position_row'].item(),
+ 'segment_height': self['data/vis_04/measured/end_position_row'].item() -
+ self['data/vis_04/measured/start_position_row'].item() + 1,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': self['data/ir_105/measured/start_position_row'].item(),
+ 'end_position_row': self['data/ir_105/measured/end_position_row'].item(),
+ 'segment_height': self['data/ir_105/measured/end_position_row'].item() -
+ self['data/ir_105/measured/start_position_row'].item() + 1,
+ 'segment_width': 5568}
+ }
+
+ return segment_position_info
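# Illustration with invented row numbers: the heights reported above follow
# from the inclusive start/end rows.
start_position_row, end_position_row = 4873, 5568
segment_height = end_position_row - start_position_row + 1  # 696 rows on the 1 km grid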
+
def get_dataset(self, key, info=None):
"""Load a dataset."""
logger.debug('Reading {} from {}'.format(key['name'], self.filename))
if "pixel_quality" in key['name']:
- return self._get_dataset_quality(key, info=info)
+ return self._get_dataset_quality(key['name'])
+ elif "index_map" in key['name']:
+ return self._get_dataset_index_map(key['name'])
+ elif _get_aux_data_name_from_dsname(key['name']) is not None:
+ return self._get_dataset_aux_data(key['name'])
elif any(lb in key['name'] for lb in {"vis_", "ir_", "nir_", "wv_"}):
return self._get_dataset_measurand(key, info=info)
else:
- raise ValueError("Unknown dataset key, not a channel or quality: "
+ raise ValueError("Unknown dataset key, not a channel, quality or auxiliary data: "
f"{key['name']:s}")
def _get_dataset_measurand(self, key, info=None):
@@ -180,20 +269,21 @@ def _get_dataset_measurand(self, key, info=None):
res = self.calibrate(data, key)
# pre-calibration units no longer apply
- info.pop("units")
attrs.pop("units")
# For each channel, the effective_radiance contains in the
# "ancillary_variables" attribute the value "pixel_quality". In
# FileYAMLReader._load_ancillary_variables, satpy will try to load
# "pixel_quality" but is lacking the context from what group to load
- # it. Until we can have multiple pixel_quality variables defined (for
+ # it: in the FCI format, each channel group (data//measured) has
+ # its own data variable 'pixel_quality'.
+ # Until we can have multiple pixel_quality variables defined (for
# example, with https://github.com/pytroll/satpy/pull/1088), rewrite
- # the ancillary variable to include the channel. See also
+ # the ancillary variable to include the channel. See also
# https://github.com/pytroll/satpy/issues/1171.
if "pixel_quality" in attrs["ancillary_variables"]:
attrs["ancillary_variables"] = attrs["ancillary_variables"].replace(
- "pixel_quality", key['name'] + "_pixel_quality")
+ "pixel_quality", key['name'] + "_pixel_quality")
else:
raise ValueError(
"Unexpected value for attribute ancillary_variables, "
@@ -206,7 +296,7 @@ def _get_dataset_measurand(self, key, info=None):
res.attrs.update(attrs)
res.attrs["platform_name"] = self._platform_name_translate.get(
- self["/attr/platform"], self["/attr/platform"])
+ self["/attr/platform"], self["/attr/platform"])
# remove unpacking parameters for calibrated data
if key['calibration'] in ['brightness_temperature', 'reflectance']:
@@ -218,29 +308,86 @@ def _get_dataset_measurand(self, key, info=None):
# remove attributes from original file which don't apply anymore
res.attrs.pop('long_name')
- return res
+ res.attrs.update(self.orbital_param)
- def _get_dataset_quality(self, key, info=None):
- """Load quality for channel.
+ return res
- Load a quality field for an FCI channel. This is a bit involved in
- case of FCI because each channel group (data//measured) has
- its own data variable 'pixel_quality', referred to in ancillary
- variables (see also Satpy issue 1171), so some special treatment in
- necessary.
- """
- # FIXME: replace by .removesuffix after we drop support for Python < 3.9
- if key['name'].endswith("_pixel_quality"):
- chan_lab = key['name'][:-len("_pixel_quality")]
- else:
- raise ValueError("Quality label must end with pixel_quality, got "
- f"{key['name']:s}")
- grp_path = self.get_channel_measured_group_path(chan_lab)
+ @cached_property
+ def orbital_param(self):
+ """Compute the orbital parameters for the current chunk."""
+ actual_subsat_lon = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_longitude')))
+ actual_subsat_lat = float(np.nanmean(self._get_aux_data_lut_vector('subsatellite_latitude')))
+ actual_sat_alt = float(np.nanmean(self._get_aux_data_lut_vector('platform_altitude')))
+
+ nominal_and_proj_subsat_lon = float(self["data/mtg_geos_projection/attr/longitude_of_projection_origin"])
+ nominal_and_proj_subsat_lat = 0
+ nominal_and_proj_sat_alt = float(self["data/mtg_geos_projection/attr/perspective_point_height"])
+
+ orb_param_dict = {
+ 'orbital_parameters': {
+ 'satellite_actual_longitude': actual_subsat_lon,
+ 'satellite_actual_latitude': actual_subsat_lat,
+ 'satellite_actual_altitude': actual_sat_alt,
+ 'satellite_nominal_longitude': nominal_and_proj_subsat_lon,
+ 'satellite_nominal_latitude': nominal_and_proj_subsat_lat,
+ 'satellite_nominal_altitude': nominal_and_proj_sat_alt,
+ 'projection_longitude': nominal_and_proj_subsat_lon,
+ 'projection_latitude': nominal_and_proj_subsat_lat,
+ 'projection_altitude': nominal_and_proj_sat_alt,
+ }}
+
+ return orb_param_dict
+
+ def _get_dataset_quality(self, dsname):
+ """Load a quality field for an FCI channel."""
+ grp_path = self.get_channel_measured_group_path(_get_channel_name_from_dsname(dsname))
dv_path = grp_path + "/pixel_quality"
data = self[dv_path]
return data
- def get_channel_measured_group_path(self, channel):
+ def _get_dataset_index_map(self, dsname):
+ """Load the index map for an FCI channel."""
+ grp_path = self.get_channel_measured_group_path(_get_channel_name_from_dsname(dsname))
+ dv_path = grp_path + "/index_map"
+ data = self[dv_path]
+
+ data = data.where(data != data.attrs.get('_FillValue', 65535))
+ return data
+
+ def _get_aux_data_lut_vector(self, aux_data_name):
+ """Load the lut vector of an auxiliary variable."""
+ lut = self[AUX_DATA[aux_data_name]]
+
+ fv = default_fillvals.get(lut.dtype.str[1:], np.nan)
+ lut = lut.where(lut != fv)
+
+ return lut
+
+ @staticmethod
+ def _getitem(block, lut):
+ return lut[block.astype('uint16')]
+
+ def _get_dataset_aux_data(self, dsname):
+ """Get the auxiliary data arrays using the index map."""
+ # get index map
+ index_map = self._get_dataset_index_map(_get_channel_name_from_dsname(dsname))
+ # subtract minimum of index variable (index_offset)
+ index_map -= np.min(self['index'])
+
+ # get lut values from 1-d vector variable
+ lut = self._get_aux_data_lut_vector(_get_aux_data_name_from_dsname(dsname))
+
+ # assign lut values based on index map indices
+ aux = index_map.data.map_blocks(self._getitem, lut.data, dtype=lut.data.dtype)
+ aux = xr.DataArray(aux, dims=index_map.dims, attrs=index_map.attrs, coords=index_map.coords)
+
+ # filter out out-of-disk values
+ aux = aux.where(index_map >= 0)
+
+ return aux
+
+ @staticmethod
+ def get_channel_measured_group_path(channel):
"""Get the channel's measured group path."""
measured_group_path = 'data/{}/measured'.format(channel)
@@ -248,19 +395,16 @@ def get_channel_measured_group_path(self, channel):
def calc_area_extent(self, key):
"""Calculate area extent for a dataset."""
- # if a user requests a pixel quality before the channel data, the
- # yaml-reader will ask the area extent of the pixel quality field,
+ # if a user requests a pixel quality or index map before the channel data, the
+ # yaml-reader will ask the area extent of the pixel quality/index map field,
# which will ultimately end up here
- if key['name'].endswith("_pixel_quality"):
- lab = key['name'][:-len("_pixel_quality")]
- else:
- lab = key['name']
+ channel_name = _get_channel_name_from_dsname(key['name'])
# Get metadata for given dataset
- measured = self.get_channel_measured_group_path(lab)
+ measured = self.get_channel_measured_group_path(channel_name)
# Get start/end line and column of loaded swath.
nlines, ncols = self[measured + "/effective_radiance/shape"]
- logger.debug('Channel {} resolution: {}'.format(lab, ncols))
+ logger.debug('Channel {} resolution: {}'.format(channel_name, ncols))
logger.debug('Row/Cols: {} / {}'.format(nlines, ncols))
# Calculate full globe line extent
@@ -268,7 +412,18 @@ def calc_area_extent(self, key):
extents = {}
for coord in "xy":
- coord_radian = self["data/{:s}/measured/{:s}".format(lab, coord)]
+ coord_radian = self["data/{:s}/measured/{:s}".format(channel_name, coord)]
+
+ # TODO remove this check when old versions of IDPF test data are deprecated
+ if coord == "x" and coord_radian.attrs['scale_factor'] > 0:
+ coord_radian.attrs['scale_factor'] *= -1
+
+ # TODO remove this check when old versions of IDPF test data are deprecated
+ if sun_earth_distance < 0.9 or sun_earth_distance > 1.1:
+ logger.info('The variable state/celestial/earth_sun_distance contains unexpected values '
+ '(mean value is {} AU). Defaulting to 1 AU for reflectance calculation.'
+ ''.format(sun_earth_distance))
+ sun_earth_distance = 1
+
+ res = 100 * radiance * np.pi * sun_earth_distance ** 2 / cesi
return res
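The index-map lookup in `_get_dataset_aux_data` can be pictured with this self-contained sketch (shapes and values are invented): a 2-D index map selects entries from a 1-D LUT, chunk by chunk, via dask's `map_blocks`:

import dask.array as da
import numpy as np
import xarray as xr

lut = xr.DataArray(np.array([1.5, 2.5, 3.5]))  # e.g. one subsatellite longitude per LUT entry
index_map = xr.DataArray(
    da.from_array(np.array([[0, 1], [2, 0]], dtype="uint16"), chunks=1),
    dims=("y", "x"))

def _getitem(block, lut):
    return lut[block.astype("uint16")]

aux = index_map.data.map_blocks(_getitem, lut.data, dtype=lut.data.dtype)
aux = xr.DataArray(aux, dims=index_map.dims)
print(aux.compute())  # [[1.5 2.5]
                      #  [3.5 1.5]]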
diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py
index 9691d99d82..867317df72 100644
--- a/satpy/readers/fci_l2_nc.py
+++ b/satpy/readers/fci_l2_nc.py
@@ -20,19 +20,19 @@
import logging
from contextlib import suppress
-from datetime import datetime, timedelta
import numpy as np
import xarray as xr
+from pyresample import geometry
from satpy import CHUNK_SIZE
-from satpy.readers._geos_area import get_area_definition, make_ext
+from satpy.readers._geos_area import get_geos_area_naming, make_ext
+from satpy.readers.eum_base import get_service_mode
from satpy.readers.file_handlers import BaseFileHandler
+from satpy.resample import get_area_def
logger = logging.getLogger(__name__)
-PRODUCT_DATA_DURATION_MINUTES = 20
-
SSP_DEFAULT = 0.0
@@ -40,44 +40,24 @@ class FciL2CommonFunctions(object):
"""Shared operations for file handlers."""
@property
- def _start_time(self):
- try:
- start_time = datetime.strptime(self.nc.attrs['time_coverage_start'], '%Y%m%d%H%M%S')
- except (ValueError, KeyError):
- # TODO if the sensing_start_time_utc attribute is not valid, uses a hardcoded value
- logger.warning("Start time cannot be obtained from file content, using default value instead")
- start_time = datetime.strptime('20200101120000', '%Y%m%d%H%M%S')
- return start_time
+ def spacecraft_name(self):
+ """Return spacecraft name."""
+ return self.nc.attrs['platform']
@property
- def _end_time(self):
- """Get observation end time."""
- try:
- end_time = datetime.strptime(self.nc.attrs['time_coverage_end'], '%Y%m%d%H%M%S')
- except (ValueError, KeyError):
- # TODO if the sensing_end_time_utc attribute is not valid, adds 20 minutes to the start time
- end_time = self._start_time + timedelta(minutes=PRODUCT_DATA_DURATION_MINUTES)
- return end_time
+ def sensor_name(self):
+ """Return instrument name."""
+ return self.nc.attrs['data_source']
@property
- def _spacecraft_name(self):
- """Return spacecraft name."""
- try:
- return self.nc.attrs['platform']
- except KeyError:
- # TODO if the platform attribute is not valid, return a default value
- logger.warning("Spacecraft name cannot be obtained from file content, using default value instead")
- return 'DEFAULT_MTG'
-
- @property
- def _sensor_name(self):
- """Return instrument."""
+ def ssp_lon(self):
+ """Return longitude at subsatellite point."""
try:
- return self.nc.attrs['data_source']
- except KeyError:
- # TODO if the data_source attribute is not valid, return a default value
- logger.warning("Sensor cannot be obtained from file content, using default value instead")
- return 'fci'
+ return float(self.nc['mtg_geos_projection'].attrs['longitude_of_projection_origin'])
+ except (KeyError, AttributeError):
+ logger.warning(f"ssp_lon could not be obtained from file content, using default value "
+ f"of {SSP_DEFAULT} degrees east instead")
+ return SSP_DEFAULT
def _get_global_attributes(self):
"""Create a dictionary of global attributes to be added to all datasets.
@@ -85,41 +65,76 @@ def _get_global_attributes(self):
Returns:
dict: A dictionary of global attributes.
filename: name of the product file
- start_time: sensing start time from best available source
- end_time: sensing end time from best available source
spacecraft_name: name of the spacecraft
ssp_lon: longitude of subsatellite point
sensor: name of sensor
- creation_time: creation time of the product
platform_name: name of the platform
"""
attributes = {
'filename': self.filename,
- 'start_time': self._start_time,
- 'end_time': self._end_time,
- 'spacecraft_name': self._spacecraft_name,
+ 'spacecraft_name': self.spacecraft_name,
'ssp_lon': self.ssp_lon,
- 'sensor': self._sensor_name,
- 'creation_time': self.filename_info['creation_time'],
- 'platform_name': self._spacecraft_name,
+ 'sensor': self.sensor_name,
+ 'platform_name': self.spacecraft_name,
}
return attributes
+ def _set_attributes(self, variable, dataset_info, segmented=False):
+ """Set dataset attributes."""
+ if segmented:
+ xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows"
+ else:
+ xdim, ydim = "number_of_columns", "number_of_rows"
+
+ if dataset_info['file_key'] not in ['product_quality', 'product_completeness', 'product_timeliness']:
+ variable = variable.rename({ydim: 'y', xdim: 'x'})
+
+ variable.attrs.setdefault('units', None)
+ variable.attrs.update(dataset_info)
+ variable.attrs.update(self._get_global_attributes())
+
+ return variable
+
+ def _slice_dataset(self, variable, dataset_info, dimensions):
+ """Slice data if dimension layers have been provided in yaml-file."""
+ slice_dict = {dim: dataset_info[dim_id] for (dim, dim_id) in dimensions.items()
+ if dim_id in dataset_info.keys() and dim in variable.dims}
+ for dim, dim_ind in slice_dict.items():
+ logger.debug(f"Extracting {dimensions[dim]}-index {dim_ind} from dimension '{dim}'.")
+ variable = variable.sel(slice_dict)
+
+ return variable
+
+ @staticmethod
+ def _mask_data(variable, fill_value):
+ """Set fill_values, as defined in yaml-file, to NaN.
+
+ Set data points in variable to NaN if they are equal to fill_value
+ or any of the values in fill_value if fill_value is a list.
+ """
+ if not isinstance(fill_value, list):
+ fill_value = [fill_value]
+
+ for val in fill_value:
+ variable = variable.where(variable != val).astype('float32')
+
+ return variable
+
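# Sketch of _mask_data with a list-valued fill_value (data and fill values
# invented): every listed value becomes NaN and the result is float32.
import numpy as np
import xarray as xr

variable = xr.DataArray(np.array([0, 1, 255, 3], dtype="uint8"))
for val in [0, 255]:
    variable = variable.where(variable != val).astype("float32")
print(variable.values)  # [nan  1. nan  3.]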
def __del__(self):
"""Close the NetCDF file that may still be open."""
with suppress(OSError):
self.nc.close()
-class FciL2NCFileHandler(BaseFileHandler, FciL2CommonFunctions):
+class FciL2NCFileHandler(FciL2CommonFunctions, BaseFileHandler):
"""Reader class for FCI L2 products in NetCDF4 format."""
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, with_area_definition=True):
"""Open the NetCDF file with xarray and prepare for dataset reading."""
super().__init__(filename, filename_info, filetype_info)
- # Use xarray's default netcdf4 engine to open the file
+ # Use xarray's default netcdf4 engine to open the file
self.nc = xr.open_dataset(
self.filename,
decode_cf=True,
@@ -130,26 +145,26 @@ def __init__(self, filename, filename_info, filetype_info):
}
)
+ if with_area_definition is False:
+ logger.info("Setting `with_area_defintion=False` has no effect on pixel-based products.")
+
# Read metadata which are common to all datasets
self.nlines = self.nc['y'].size
self.ncols = self.nc['x'].size
self._projection = self.nc['mtg_geos_projection']
+ self.multi_dims = {'maximum_number_of_layers': 'layer', 'number_of_vis_channels': 'vis_channel_id'}
- # Compute the area definition
- self._area_def = self._compute_area_def()
-
- @property
- def ssp_lon(self):
- """Return subsatellite point longitude."""
+ def get_area_def(self, key):
+ """Return the area definition."""
try:
- return float(self._projection.attrs['longitude_of_projection_origin'])
- except KeyError:
- logger.warning("ssp_lon cannot be obtained from file content, using default value instead")
- return SSP_DEFAULT
+ return self._area_def
+ except AttributeError:
+ raise NotImplementedError
def get_dataset(self, dataset_id, dataset_info):
"""Get dataset using the file_key in dataset_info."""
var_key = dataset_info['file_key']
+ par_name = dataset_info['name']
logger.debug('Reading in file to get dataset with key %s.', var_key)
try:
@@ -158,109 +173,127 @@ def get_dataset(self, dataset_id, dataset_info):
logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key)
return None
- # TODO in some of the test files, invalid pixels contain the value defined as "fill_value" in the YAML file
- # instead of being masked directly in the netCDF variable.
- # therefore NaN is applied where such value is found or (0 if the array contains integer values)
- # the next 11 lines have to be removed once the product files are correctly configured
- try:
- mask_value = dataset_info['mask_value']
- except KeyError:
- mask_value = np.NaN
- try:
- fill_value = dataset_info['fill_value']
- except KeyError:
- fill_value = np.NaN
+ # Compute the area definition
+ if var_key not in ['product_quality', 'product_completeness', 'product_timeliness']:
+ self._area_def = self._compute_area_def(dataset_id)
- if dataset_info['file_type'] == 'nc_fci_test_clm':
- data_values = variable.where(variable != fill_value, mask_value).astype('uint32', copy=False)
- else:
- data_values = variable.where(variable != fill_value, mask_value).astype('float32', copy=False)
+ if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()):
+ variable = self._slice_dataset(variable, dataset_info, self.multi_dims)
- data_values.attrs = variable.attrs
- variable = data_values
+ if par_name == 'retrieved_cloud_optical_thickness':
+ variable = self.get_total_cot(variable)
- # If the variable has 3 dimensions, select the required layer
- if variable.ndim == 3:
- layer = dataset_info.get('layer', 0)
- logger.debug('Selecting the layer %d.', layer)
- variable = variable.sel(maximum_number_of_layers=layer)
+ if dataset_info['file_type'] == 'nc_fci_test_clm':
+ variable = self._decode_clm_test_data(variable, dataset_info)
- if dataset_info['file_type'] == 'nc_fci_test_clm' and var_key != 'cloud_mask_cmrt6_test_result':
- variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31)
+ if 'fill_value' in dataset_info:
+ variable = self._mask_data(variable, dataset_info['fill_value'])
- # Rename the dimensions as required by Satpy
- variable = variable.rename({"number_of_rows": 'y', "number_of_columns": 'x'})
+ variable = self._set_attributes(variable, dataset_info)
- # Manage the attributes of the dataset
- variable.attrs.setdefault('units', None)
+ return variable
- variable.attrs.update(dataset_info)
- variable.attrs.update(self._get_global_attributes())
+ @staticmethod
+ def _decode_clm_test_data(variable, dataset_info):
+ if dataset_info['file_key'] != 'cloud_mask_cmrt6_test_result':
+ variable = variable.astype('uint32')
+ variable.values = (variable.values >> dataset_info['extract_byte'] << 31 >> 31).astype('int8')
return variable
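# Worked example of the bit extraction above (test words invented): for an
# unsigned 32-bit value v, (v >> n << 31 >> 31) isolates bit n. Note the
# YAML key is called 'extract_byte' although it indexes a bit.
import numpy as np

tests = np.array([0b0000, 0b0010, 0b0110], dtype=np.uint32)
n = 1
flag = (tests >> n << 31 >> 31).astype("int8")
print(flag)  # [0 1 1]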
- def get_area_def(self, key):
- """Return the area definition (common to all data in product)."""
- return self._area_def
-
- def _compute_area_def(self):
+ def _compute_area_def(self, dataset_id):
"""Compute the area definition.
Returns:
AreaDefinition: A pyresample AreaDefinition object containing the area definition.
"""
- # Read the projection data from the mtg_geos_projection variable
- a = float(self._projection.attrs['semi_major_axis'])
- b = float(self._projection.attrs['semi_minor_axis'])
- h = float(self._projection.attrs['perspective_point_height'])
+ area_extent = self._get_area_extent()
+ area_naming, proj_dict = self._get_proj_area(dataset_id)
+ area_def = geometry.AreaDefinition(
+ area_naming['area_id'],
+ area_naming['description'],
+ "",
+ proj_dict,
+ self.ncols,
+ self.nlines,
+ area_extent)
- # TODO sweep_angle_axis value not handled at the moment, therefore commented out
- # sweep_axis = self._projection.attrs['sweep_angle_axis']
+ return area_def
- # Coordinates of the pixel in radians
+ def _get_area_extent(self):
+ """Calculate area extent of dataset."""
+ # Load and convert x/y coordinates to degrees as required by the make_ext function
x = self.nc['x']
y = self.nc['y']
- # TODO conversion to radians: offset and scale factor are missing from some test NetCDF file
- # TODO the next two lines should be removed when the offset and scale factor are correctly configured
- if not hasattr(x, 'standard_name'):
- x = np.radians(x * 0.003202134 - 8.914740401)
- y = np.radians(y * 0.003202134 - 8.914740401)
-
- # Convert to degrees as required by the make_ext function
x_deg = np.degrees(x)
y_deg = np.degrees(y)
- # Select the extreme points of the extension area
- x_l, x_r = x_deg.values[0], x_deg.values[-1]
- y_l, y_u = y_deg.values[0], y_deg.values[-1]
+ # Select the extreme points and calculate the area extent (note: these refer to pixel centres)
+ ll_x, ur_x = -x_deg.values[0], -x_deg.values[-1]
+ ll_y, ur_y = y_deg.values[-1], y_deg.values[0]
+ h = float(self._projection.attrs['perspective_point_height'])
+ area_extent_pixel_center = make_ext(ll_x, ur_x, ll_y, ur_y, h)
- # Compute the extension area in meters
- area_extent = make_ext(x_l, x_r, y_l, y_u, h)
+ # Shift area extent by half a pixel to get the area extent w.r.t. the dataset/pixel corners
+ scale_factor = (x[1:]-x[0:-1]).values.mean()
+ res = abs(scale_factor) * h
+ area_extent = tuple(i + res/2 if i > 0 else i - res/2 for i in area_extent_pixel_center)
- # Assemble the projection definition dictionary
- p_dict = {
- 'nlines': self.nlines,
- 'ncols': self.ncols,
- 'ssp_lon': self.ssp_lon,
- 'a': a,
- 'b': b,
- 'h': h,
- 'a_name': 'FCI Area', # TODO to be confirmed
- 'a_desc': 'Area for FCI instrument', # TODO to be confirmed
- 'p_id': 'geos'
- }
+ return area_extent
- # Compute the area definition
- area_def = get_area_definition(p_dict, area_extent)
+ def _get_proj_area(self, dataset_id):
+ """Extract projection and area information."""
+ # Read the projection data from the mtg_geos_projection variable
+ a = float(self._projection.attrs['semi_major_axis'])
+ h = float(self._projection.attrs['perspective_point_height'])
- return area_def
+ # Some L2PF test data files have a typo in the keyname for the inverse flattening parameter. Use a default value
+ # as fallback until all L2PF test files are correctly formatted.
+ rf = float(self._projection.attrs.get('inverse_flattening', 298.257223563))
+
+ res = dataset_id.resolution
+
+ area_naming_input_dict = {'platform_name': 'mtg',
+ 'instrument_name': 'fci',
+ 'resolution': res,
+ }
+
+ area_naming = get_geos_area_naming({**area_naming_input_dict,
+ **get_service_mode('fci', self.ssp_lon)})
+
+ proj_dict = {'a': a,
+ 'lon_0': self.ssp_lon,
+ 'h': h,
+ "rf": rf,
+ 'proj': 'geos',
+ 'units': 'm',
+ "sweep": 'y'}
+
+ return area_naming, proj_dict
+
+ @staticmethod
+ def get_total_cot(variable):
+ """Sum the cloud optical thickness from the two OCA layers.
+
+ The optical thickness has to be transformed to linear space before adding the values from the two layers. The
+ combined/total optical thickness is then transformed back to logarithmic space.
+ """
+ attrs = variable.attrs
+ variable = 10 ** variable
+ variable = variable.fillna(0.)
+ variable = variable.sum(dim='maximum_number_of_layers', keep_attrs=True)
+ variable = variable.where(variable != 0., np.nan)
+ variable = np.log10(variable)
+ variable.attrs = attrs
+
+ return variable
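# Worked example of the summation above (layer values invented): per-layer
# COT is stored as log10, so layers are combined in linear space and the
# total converted back to log space.
import numpy as np

layers_log10 = np.array([0.5, 1.0])           # log10(COT) of the two OCA layers
total = np.log10(np.sum(10 ** layers_log10))  # ~1.12, i.e. total COT ~13.2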
-class FciL2NCSegmentFileHandler(BaseFileHandler, FciL2CommonFunctions):
+class FciL2NCSegmentFileHandler(FciL2CommonFunctions, BaseFileHandler):
"""Reader class for FCI L2 Segmented products in NetCDF4 format."""
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False):
"""Open the NetCDF file with xarray and prepare for dataset reading."""
super().__init__(filename, filename_info, filetype_info)
# Use xarray's default netcdf4 engine to open the file
@@ -277,8 +310,19 @@ def __init__(self, filename, filename_info, filetype_info):
# Read metadata which are common to all datasets
self.nlines = self.nc['number_of_FoR_rows'].size
self.ncols = self.nc['number_of_FoR_cols'].size
+ self.with_adef = with_area_definition
+ self.multi_dims = {
+ 'number_of_categories': 'category_id', 'number_of_channels': 'channel_id',
+ 'number_of_vis_channels': 'vis_channel_id', 'number_of_ir_channels': 'ir_channel_id',
+ 'number_test': 'test_id',
+ }
- self.ssp_lon = SSP_DEFAULT
+ def get_area_def(self, key):
+ """Return the area definition."""
+ try:
+ return self._area_def
+ except AttributeError:
+ raise NotImplementedError
def get_dataset(self, dataset_id, dataset_info):
"""Get dataset using the file_key in dataset_info."""
@@ -291,24 +335,70 @@ def get_dataset(self, dataset_id, dataset_info):
logger.warning("Could not find key %s in NetCDF file, no valid Dataset created", var_key)
return None
- # TODO in some of the test files, invalid pixels contain the value defined as "fill_value" in the YAML file
- # instead of being masked directly in the netCDF variable.
- # therefore NaN is applied where such value is found or (0 if the array contains integer values)
- # the next 11 lines have to be removed once the product files are correctly configured
+ if any(dim_id in dataset_info.keys() for dim_id in self.multi_dims.values()):
+ variable = self._slice_dataset(variable, dataset_info, self.multi_dims)
- mask_value = dataset_info.get('mask_value', np.NaN)
- fill_value = dataset_info.get('fill_value', np.NaN)
+ if self.with_adef and var_key not in ['longitude', 'latitude',
+ 'product_quality', 'product_completeness', 'product_timeliness']:
+ self._area_def = self._construct_area_def(dataset_id)
- float_variable = variable.where(variable != fill_value, mask_value).astype('float32', copy=False)
- float_variable.attrs = variable.attrs
- variable = float_variable
+ # coordinates are not relevant when returning data with an AreaDefinition
+ if 'coordinates' in dataset_info.keys():
+ del dataset_info['coordinates']
- # Rename the dimensions as required by Satpy
- variable = variable.rename({"number_of_FoR_rows": 'y', "number_of_FoR_cols": 'x'})
-# # Manage the attributes of the dataset
- variable.attrs.setdefault('units', None)
+ if 'fill_value' in dataset_info:
+ variable = self._mask_data(variable, dataset_info['fill_value'])
- variable.attrs.update(dataset_info)
- variable.attrs.update(self._get_global_attributes())
+ variable = self._set_attributes(variable, dataset_info, segmented=True)
return variable
+
+ def _construct_area_def(self, dataset_id):
+ """Construct the area definition.
+
+ Returns:
+ AreaDefinition: A pyresample AreaDefinition object containing the area definition.
+
+ """
+ res = dataset_id.resolution
+
+ area_naming_input_dict = {'platform_name': 'mtg',
+ 'instrument_name': 'fci',
+ 'resolution': res,
+ }
+
+ area_naming = get_geos_area_naming({**area_naming_input_dict,
+ **get_service_mode('fci', self.ssp_lon)})
+
+ # Construct area definition from standardized area definition.
+ stand_area_def = get_area_def(area_naming['area_id'])
+
+ if (stand_area_def.x_size != self.ncols) | (stand_area_def.y_size != self.nlines):
+ raise NotImplementedError('Unrecognised AreaDefinition.')
+
+ mod_area_extent = self._modify_area_extent(stand_area_def.area_extent)
+
+ area_def = geometry.AreaDefinition(
+ stand_area_def.area_id,
+ stand_area_def.description,
+ "",
+ stand_area_def.proj_dict,
+ stand_area_def.x_size,
+ stand_area_def.y_size,
+ mod_area_extent)
+
+ return area_def
+
+ @staticmethod
+ def _modify_area_extent(stand_area_extent):
+ """Modify area extent to macth satellite projection.
+
+ Area extent has to be modified since the L2 products are stored with the south-east
+ in the upper-right corner (as opposed to north-east in the standardized area definitions).
+ """
+ ll_x, ll_y, ur_x, ur_y = stand_area_extent
+ ll_y *= -1.
+ ur_y *= -1.
+ area_extent = tuple([ll_x, ll_y, ur_x, ur_y])
+
+ return area_extent
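
For reference, the flip in `_modify_area_extent` amounts to negating the two y coordinates of the extent tuple. A minimal standalone sketch, with made-up extent values (not part of the patch):

    def flip_extent_y(extent):
        """Negate the y coordinates of an (ll_x, ll_y, ur_x, ur_y) extent."""
        ll_x, ll_y, ur_x, ur_y = extent
        return (ll_x, -ll_y, ur_x, -ur_y)

    # A north-up full-disk extent becomes south-up:
    print(flip_extent_y((-5568000.0, -5568000.0, 5568000.0, 5568000.0)))
    # (-5568000.0, 5568000.0, 5568000.0, -5568000.0)
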
diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py
index 5bf0f1734b..cebab6e307 100644
--- a/satpy/readers/file_handlers.py
+++ b/satpy/readers/file_handlers.py
@@ -17,15 +17,34 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface for BaseFileHandlers."""
-from abc import ABCMeta
-
import numpy as np
+import xarray as xr
from pyresample.geometry import SwathDefinition
from satpy.dataset import combine_metadata
+from satpy.readers import open_file_or_filename
+
+
+def open_dataset(filename, *args, **kwargs):
+ """Open a file with xarray.
+
+ Args:
+ filename (Union[str, FSFile]):
+ The path to the file to open. Can be a `string` or
+ :class:`~satpy.readers.FSFile` object which allows using
+ `fsspec` or `s3fs` like files.
+
+ Returns:
+ xarray.Dataset:
+
+ Notes:
+ This can be used to enable readers to open remote files.
+ """
+ f_obj = open_file_or_filename(filename)
+ return xr.open_dataset(f_obj, *args, **kwargs)
-class BaseFileHandler(metaclass=ABCMeta):
+class BaseFileHandler:
"""Base file handler."""
def __init__(self, filename, filename_info, filetype_info):
@@ -85,10 +104,8 @@ def combine_info(self, all_infos):
- end_time
- start_orbit
- end_orbit
- - satellite_altitude
- - satellite_latitude
- - satellite_longitude
- orbital_parameters
+ - time_parameters
Also, concatenate the areas.
@@ -97,26 +114,8 @@ def combine_info(self, all_infos):
new_dict = self._combine(all_infos, min, 'start_time', 'start_orbit')
new_dict.update(self._combine(all_infos, max, 'end_time', 'end_orbit'))
- new_dict.update(self._combine(all_infos, np.mean,
- 'satellite_longitude',
- 'satellite_latitude',
- 'satellite_altitude'))
-
- # Average orbital parameters
- orb_params = [info.get('orbital_parameters', {}) for info in all_infos]
- if all(orb_params):
- # Collect all available keys
- orb_params_comb = {}
- for d in orb_params:
- orb_params_comb.update(d)
-
- # Average known keys
- keys = ['projection_longitude', 'projection_latitude', 'projection_altitude',
- 'satellite_nominal_longitude', 'satellite_nominal_latitude',
- 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude',
- 'nadir_longitude', 'nadir_latitude']
- orb_params_comb.update(self._combine(orb_params, np.mean, *keys))
- new_dict['orbital_parameters'] = orb_params_comb
+ new_dict.update(self._combine_orbital_parameters(all_infos))
+ new_dict.update(self._combine_time_parameters(all_infos))
try:
area = SwathDefinition(lons=np.ma.vstack([info['area'].lons for info in all_infos]),
@@ -129,6 +128,44 @@ def combine_info(self, all_infos):
new_dict.update(combined_info)
return new_dict
+ def _combine_orbital_parameters(self, all_infos):
+ orb_params = [info.get('orbital_parameters', {}) for info in all_infos]
+ if not all(orb_params):
+ return {}
+ # Collect all available keys
+ orb_params_comb = {}
+ for d in orb_params:
+ orb_params_comb.update(d)
+
+ # Average known keys
+ keys = ['projection_longitude', 'projection_latitude', 'projection_altitude',
+ 'satellite_nominal_longitude', 'satellite_nominal_latitude',
+ 'satellite_actual_longitude', 'satellite_actual_latitude', 'satellite_actual_altitude',
+ 'nadir_longitude', 'nadir_latitude']
+ orb_params_comb.update(self._combine(orb_params, np.mean, *keys))
+ return {'orbital_parameters': orb_params_comb}
+
+ def _combine_time_parameters(self, all_infos):
+ time_params = [info.get('time_parameters', {}) for info in all_infos]
+ if not all(time_params):
+ return {}
+ # Collect all available keys
+ time_params_comb = {}
+ for d in time_params:
+ time_params_comb.update(d)
+
+ start_keys = (
+ 'nominal_start_time',
+ 'observation_start_time',
+ )
+ end_keys = (
+ 'nominal_end_time',
+ 'observation_end_time',
+ )
+ time_params_comb.update(self._combine(time_params, min, *start_keys))
+ time_params_comb.update(self._combine(time_params, max, *end_keys))
+ return {'time_parameters': time_params_comb}
+
@property
def start_time(self):
"""Get start time."""
@@ -151,13 +188,16 @@ def file_type_matches(self, ds_ftype):
ds_ftype (str or list): File type or list of file types that a
dataset is configured to be loaded from.
- Returns: ``True`` if this file handler object's type matches the
- dataset's file type(s), ``False`` otherwise.
+ Returns:
+ ``True`` if this file handler object's type matches the
+ dataset's file type(s), ``None`` otherwise. ``None`` is returned
+ instead of ``False`` to follow the convention of the
+ :meth:`available_datasets` method.
"""
- if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']:
- return True
- elif self.filetype_info['file_type'] in ds_ftype:
+ if not isinstance(ds_ftype, (list, tuple)):
+ ds_ftype = [ds_ftype]
+ if self.filetype_info['file_type'] in ds_ftype:
return True
return None
@@ -197,7 +237,8 @@ def available_datasets(self, configured_datasets=None):
available datasets. This argument could be the result of a
previous file handler's implementation of this method.
- Returns: Iterator of (bool or None, dict) pairs where dict is the
+ Returns:
+ Iterator of (bool or None, dict) pairs where dict is the
dataset's metadata. If the dataset is available in the current
file type then the boolean value should be ``True``, ``False``
if we **know** about the dataset but it is unavailable, or
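
The extracted `_combine_time_parameters` merges the per-file `time_parameters` dicts, then reduces start keys with `min` and end keys with `max`. A standalone sketch of that reduction, using plain dicts in place of dataset infos (key names taken from the patch):

    from datetime import datetime

    def combine_time_parameters(all_infos):
        """Take the earliest start and the latest end across all infos."""
        time_params = [info.get('time_parameters', {}) for info in all_infos]
        if not all(time_params):
            return {}
        combined = {}
        for d in time_params:
            combined.update(d)
        for key, reduce_func in (('observation_start_time', min),
                                 ('observation_end_time', max)):
            values = [d[key] for d in time_params if key in d]
            if values:
                combined[key] = reduce_func(values)
        return {'time_parameters': combined}

    infos = [{'time_parameters': {'observation_start_time': datetime(2022, 1, 1, 12, 0),
                                  'observation_end_time': datetime(2022, 1, 1, 12, 10)}},
             {'time_parameters': {'observation_start_time': datetime(2022, 1, 1, 12, 10),
                                  'observation_end_time': datetime(2022, 1, 1, 12, 20)}}]
    print(combine_time_parameters(infos))  # 12:00 start, 12:20 end
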
diff --git a/satpy/readers/fy4_base.py b/satpy/readers/fy4_base.py
new file mode 100644
index 0000000000..ceb8ee75bd
--- /dev/null
+++ b/satpy/readers/fy4_base.py
@@ -0,0 +1,260 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Base reader for the L1 HDF data from the AGRI and GHI instruments aboard the FengYun-4A/B satellites.
+
+The files read by this reader are described in the official Real Time Data Service:
+
+ http://fy4.nsmc.org.cn/data/en/data/realtime.html
+
+"""
+
+import logging
+from datetime import datetime
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+
+from satpy._compat import cached_property
+from satpy.readers._geos_area import get_area_definition, get_area_extent
+from satpy.readers.hdf5_utils import HDF5FileHandler
+
+logger = logging.getLogger(__name__)
+
+RESOLUTION_LIST = [250, 500, 1000, 2000, 4000]
+
+
+class FY4Base(HDF5FileHandler):
+ """The base class for the FengYun4 AGRI and GHI readers."""
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Init filehandler."""
+ super(FY4Base, self).__init__(filename, filename_info, filetype_info)
+
+ self.sensor = filename_info['instrument']
+
+ # info of 250m, 500m, 1km, 2km and 4km data
+ self._COFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5]
+ self._LOFF_list = [21983.5, 10991.5, 5495.5, 2747.5, 1373.5]
+
+ self._CFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0]
+ self._LFAC_list = [163730199.0, 81865099.0, 40932549.0, 20466274.0, 10233137.0]
+
+ self.PLATFORM_NAMES = {'FY4A': 'FY-4A',
+ 'FY4B': 'FY-4B',
+ 'FY4C': 'FY-4C'}
+
+ try:
+ self.PLATFORM_ID = self.PLATFORM_NAMES[filename_info['platform_id']]
+ except KeyError:
+ raise KeyError(f"Unsupported platform ID: {filename_info['platform_id']}")
+ self.CHANS_ID = 'NOMChannel'
+ self.SAT_ID = 'NOMSatellite'
+ self.SUN_ID = 'NOMSun'
+
+ @staticmethod
+ def scale(dn, slope, offset):
+ """Convert digital number (DN) to calibrated quantity through scaling.
+
+ Args:
+ dn: Raw detector digital number
+ slope: Slope
+ offset: Offset
+
+ Returns:
+ Scaled data
+
+ """
+ ref = dn * slope + offset
+ ref = ref.clip(min=0)
+ ref.attrs = dn.attrs
+
+ return ref
+
+ def apply_lut(self, data, lut):
+ """Calibrate digital number (DN) by applying a LUT.
+
+ Args:
+ data: Raw detector digital number
+ lut: the look up table
+ Returns:
+ Calibrated quantity
+ """
+ # append NaN to the end of the LUT to serve as the fill value
+ lut = np.append(lut, np.nan)
+ data.data = da.where(data.data > lut.shape[0], lut.shape[0] - 1, data.data)
+ res = data.data.map_blocks(self._getitem, lut, dtype=lut.dtype)
+ res = xr.DataArray(res, dims=data.dims,
+ attrs=data.attrs, coords=data.coords)
+
+ return res
+
+ @staticmethod
+ def _getitem(block, lut):
+ return lut[block]
+
+ @cached_property
+ def reflectance_coeffs(self):
+ """Retrieve the reflectance calibration coefficients from the HDF file."""
+ # using the corresponding SCALE and OFFSET
+ if self.PLATFORM_ID == 'FY-4A':
+ cal_coef = 'CALIBRATION_COEF(SCALE+OFFSET)'
+ elif self.PLATFORM_ID == 'FY-4B':
+ cal_coef = 'Calibration/CALIBRATION_COEF(SCALE+OFFSET)'
+ else:
+ raise KeyError(f"Unsupported platform ID for calibration: {self.PLATFORM_ID}")
+ return self.get(cal_coef).values
+
+ def calibrate(self, data, ds_info, ds_name, file_key):
+ """Calibrate the data."""
+ # Check if calibration is present; if not, assume the dataset is an angle
+ calibration = ds_info.get('calibration')
+ # Return raw data in case of counts or no calibration
+ if calibration in ('counts', None):
+ data.attrs['units'] = ds_info['units']
+ ds_info['valid_range'] = data.attrs['valid_range']
+ ds_info['fill_value'] = data.attrs['FillValue'].item()
+ elif calibration == 'reflectance':
+ channel_index = int(file_key[-2:]) - 1
+ data = self.calibrate_to_reflectance(data, channel_index, ds_info)
+ elif calibration == 'brightness_temperature':
+ data = self.calibrate_to_bt(data, ds_info, ds_name)
+ elif calibration == 'radiance':
+ raise NotImplementedError("Calibration to radiance is not supported.")
+ # Apply range limits, but not for counts, otherwise we would convert them to float!
+ if calibration != 'counts':
+ data = data.where((data >= min(data.attrs['valid_range'])) &
+ (data <= max(data.attrs['valid_range'])))
+ else:
+ data.attrs['_FillValue'] = data.attrs['FillValue'].item()
+ return data
+
+ def calibrate_to_reflectance(self, data, channel_index, ds_info):
+ """Calibrate to reflectance [%]."""
+ logger.debug("Calibrating to reflectances")
+ # using the corresponding SCALE and OFFSET
+ if self.sensor != 'AGRI' and self.sensor != 'GHI':
+ raise ValueError(f'Unsupported sensor type: {self.sensor}')
+
+ coeffs = self.reflectance_coeffs
+ num_channel = coeffs.shape[0]
+
+ if self.sensor == 'AGRI' and num_channel == 1:
+ # only channel_2, resolution = 500 m
+ channel_index = 0
+ data.data = da.where(data.data == data.attrs['FillValue'].item(), np.nan, data.data)
+ data.attrs['scale_factor'] = coeffs[channel_index, 0].item()
+ data.attrs['add_offset'] = coeffs[channel_index, 1].item()
+ data = self.scale(data, data.attrs['scale_factor'], data.attrs['add_offset'])
+ data *= 100
+ ds_info['valid_range'] = (data.attrs['valid_range'] * data.attrs['scale_factor'] + data.attrs['add_offset'])
+ ds_info['valid_range'] = ds_info['valid_range'] * 100
+ return data
+
+ def calibrate_to_bt(self, data, ds_info, ds_name):
+ """Calibrate to Brightness Temperatures [K]."""
+ logger.debug("Calibrating to brightness_temperature")
+
+ if self.sensor not in ['GHI', 'AGRI']:
+ raise ValueError("Error, sensor must be GHI or AGRI.")
+
+ # The key is sometimes prefixed with `Calibration/` so we try both options here
+ lut_key = ds_info.get('lut_key', ds_name)
+ try:
+ lut = self[lut_key]
+ except KeyError:
+ lut_key = f'Calibration/{ds_info.get("lut_key", ds_name)}'
+ lut = self[lut_key]
+
+ # the value of dn is the index of brightness_temperature
+ data = self.apply_lut(data, lut)
+ ds_info['valid_range'] = lut.attrs['valid_range']
+ return data
+
+ @property
+ def start_time(self):
+ """Get the start time."""
+ start_time = self['/attr/Observing Beginning Date'] + 'T' + self['/attr/Observing Beginning Time'] + 'Z'
+ try:
+ return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%S.%fZ')
+ except ValueError:
+ # For some data there is no sub-second component
+ return datetime.strptime(start_time, '%Y-%m-%dT%H:%M:%SZ')
+
+ @property
+ def end_time(self):
+ """Get the end time."""
+ end_time = self['/attr/Observing Ending Date'] + 'T' + self['/attr/Observing Ending Time'] + 'Z'
+ try:
+ return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%S.%fZ')
+ except ValueError:
+ # For some data there is no sub-second component
+ return datetime.strptime(end_time, '%Y-%m-%dT%H:%M:%SZ')
+
+ def get_area_def(self, key):
+ """Get the area definition."""
+ # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification
+ # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf
+ res = key['resolution']
+ pdict = {}
+ pdict['coff'] = self._COFF_list[RESOLUTION_LIST.index(res)]
+ pdict['loff'] = self._LOFF_list[RESOLUTION_LIST.index(res)]
+ pdict['cfac'] = self._CFAC_list[RESOLUTION_LIST.index(res)]
+ pdict['lfac'] = self._LFAC_list[RESOLUTION_LIST.index(res)]
+ try:
+ pdict['a'] = float(self.file_content['/attr/Semimajor axis of ellipsoid'])
+ except KeyError:
+ pdict['a'] = float(self.file_content['/attr/dEA'])
+ if pdict['a'] < 10000:
+ pdict['a'] = pdict['a'] * 1E3 # equator radius (m)
+ try:
+ pdict['b'] = float(self.file_content['/attr/Semiminor axis of ellipsoid'])
+ except KeyError:
+ pdict['b'] = pdict['a'] * (1 - 1 / self.file_content['/attr/dObRecFlat']) # polar radius (m)
+
+ pdict['h'] = self.file_content['/attr/NOMSatHeight'] # satellite altitude (m)
+ if pdict['h'] > 42000000.0:
+ pdict['h'] = pdict['h'] - pdict['a']
+
+ pdict['ssp_lon'] = float(self.file_content['/attr/NOMCenterLon'])
+ pdict['nlines'] = float(self.file_content['/attr/RegLength'])
+ pdict['ncols'] = float(self.file_content['/attr/RegWidth'])
+
+ pdict['scandir'] = 'N2S'
+ pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type'])
+ pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m'
+ pdict['p_id'] = f'FY-4, {res}m'
+
+ pdict['nlines'] = pdict['nlines'] - 1
+ pdict['ncols'] = pdict['ncols'] - 1
+
+ pdict['coff'] = pdict['coff'] - 0.5
+ pdict['loff'] = pdict['loff'] + 1
+
+ area_extent = get_area_extent(pdict)
+ area_extent = (area_extent[0],
+ area_extent[1],
+ area_extent[2],
+ area_extent[3])
+
+ pdict['nlines'] = pdict['nlines'] + 1
+ pdict['ncols'] = pdict['ncols'] + 1
+
+ area = get_area_definition(pdict, area_extent)
+
+ return area
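
`apply_lut` appends a NaN slot, clamps out-of-range counts to it, and indexes the LUT block by block so the lookup stays lazy. A standalone dask sketch of the same pattern (LUT contents and counts are invented):

    import dask.array as da
    import numpy as np

    lut = np.append(np.linspace(200.0, 320.0, 1024), np.nan)  # index 1024 = fill
    dn = da.from_array(np.array([[0, 512], [1023, 1024]]), chunks=2)
    dn = da.where(dn >= lut.shape[0], lut.shape[0] - 1, dn)   # clamp to the NaN slot
    bt = dn.map_blocks(lambda block: lut[block], dtype=lut.dtype)
    print(bt.compute())  # [[200. ~260.], [320. nan]]
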
diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py
index f8d1667abd..0f2604ec20 100644
--- a/satpy/readers/generic_image.py
+++ b/satpy/readers/generic_image.py
@@ -28,15 +28,15 @@
"""
import logging
-import rasterio
-import xarray as xr
import dask.array as da
import numpy as np
+import rasterio
+import xarray as xr
+from pyresample import utils
-from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
-from pyresample import utils
+from satpy.readers.file_handlers import BaseFileHandler
BANDS = {1: ['L'],
2: ['L', 'A'],
diff --git a/satpy/readers/geocat.py b/satpy/readers/geocat.py
index b9094119f9..3343e25533 100644
--- a/satpy/readers/geocat.py
+++ b/satpy/readers/geocat.py
@@ -29,7 +29,10 @@
and projection information to calculate the area extents.
"""
+from __future__ import annotations
+
import logging
+
import numpy as np
from pyproj import Proj
from pyresample import geometry
@@ -61,7 +64,7 @@ class GEOCATFileHandler(NetCDF4FileHandler):
'goes16': 'abi', # untested
'goesr': 'abi', # untested
}
- platforms = {
+ platforms: dict[str, str] = {
}
resolutions = {
'abi': {
@@ -81,7 +84,7 @@ def get_sensor(self, sensor):
for k, v in self.sensors.items():
if k == sensor:
return v
- elif k in sensor:
+ if k in sensor:
last_resort = v
if last_resort:
return last_resort
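
The `get_sensor` change keeps exact matches taking precedence over substring matches. The control flow in isolation (the mapping below is invented for illustration):

    def resolve_sensor(sensor, sensors):
        """Return an exact mapping if present, else the last substring match."""
        last_resort = None
        for key, value in sensors.items():
            if key == sensor:
                return value
            if key in sensor:
                last_resort = value
        return last_resort

    sensors = {'goes': 'imager', 'goes16': 'abi'}
    print(resolve_sensor('goes16', sensors))         # exact match -> 'abi'
    print(resolve_sensor('goes15_imager', sensors))  # substring match -> 'imager'
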
diff --git a/satpy/readers/ghi_l1.py b/satpy/readers/ghi_l1.py
new file mode 100644
index 0000000000..2e26aeee24
--- /dev/null
+++ b/satpy/readers/ghi_l1.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Geostationary High-speed Imager reader for the Level_1 HDF format.
+
+This instrument is aboard the FengYun-4B satellite. No public document describing this
+format is available, but it is broadly similar to the co-flying AGRI instrument.
+
+"""
+
+import logging
+
+from pyproj import Proj
+
+from satpy.readers._geos_area import get_area_definition
+from satpy.readers.fy4_base import FY4Base
+
+logger = logging.getLogger(__name__)
+
+
+class HDF_GHI_L1(FY4Base):
+ """GHI l1 file handler."""
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Init filehandler."""
+ super(HDF_GHI_L1, self).__init__(filename, filename_info, filetype_info)
+ self.sensor = 'GHI'
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Load a dataset."""
+ ds_name = dataset_id['name']
+ logger.debug('Reading in get_dataset %s.', ds_name)
+ file_key = ds_info.get('file_key', ds_name)
+ if self.CHANS_ID in file_key:
+ file_key = f'Data/{file_key}'
+ elif self.SUN_ID in file_key or self.SAT_ID in file_key:
+ file_key = f'Navigation/{file_key}'
+ data = self.get(file_key)
+ if data.ndim >= 2:
+ data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'})
+
+ data = self.calibrate(data, ds_info, ds_name, file_key)
+
+ self.adjust_attrs(data, ds_info)
+
+ return data
+
+ def adjust_attrs(self, data, ds_info):
+ """Adjust the attrs of the data."""
+ satname = self.PLATFORM_NAMES.get(self['/attr/Satellite Name'], self['/attr/Satellite Name'])
+ data.attrs.update({'platform_name': satname,
+ 'sensor': self['/attr/Sensor Identification Code'].lower(),
+ 'orbital_parameters': {
+ 'satellite_nominal_latitude': self['/attr/NOMSubSatLat'].item(),
+ 'satellite_nominal_longitude': self['/attr/NOMSubSatLon'].item(),
+ 'satellite_nominal_altitude': self['/attr/NOMSatHeight'].item()}})
+ data.attrs.update(ds_info)
+ # remove attributes that could be confusing later
+ data.attrs.pop('FillValue', None)
+ data.attrs.pop('Intercept', None)
+ data.attrs.pop('Slope', None)
+
+ def get_area_def(self, key):
+ """Get the area definition."""
+ # Coordination Group for Meteorological Satellites LRIT/HRIT Global Specification
+ # https://www.cgms-info.org/documents/cgms-lrit-hrit-global-specification-(v2-8-of-30-oct-2013).pdf
+ res = key['resolution']
+
+ pdict = {}
+
+ c_lats = self.file_content['/attr/Corner-Point Latitudes']
+ c_lons = self.file_content['/attr/Corner-Point Longitudes']
+
+ p1 = (c_lons[0], c_lats[0])
+ p2 = (c_lons[1], c_lats[1])
+ p3 = (c_lons[2], c_lats[2])
+ p4 = (c_lons[3], c_lats[3])
+
+ pdict['a'] = self.file_content['/attr/Semi_major_axis'] * 1E3 # equator radius (m)
+ pdict['b'] = self.file_content['/attr/Semi_minor_axis'] * 1E3 # polar radius (m)
+ pdict['h'] = self.file_content['/attr/NOMSatHeight'] * 1E3 # satellite altitude (m)
+
+ pdict['h'] = pdict['h'] - pdict['a']
+
+ pdict['ssp_lon'] = float(self.file_content['/attr/NOMSubSatLon'])
+ pdict['nlines'] = float(self.file_content['/attr/RegLength'])
+ pdict['ncols'] = float(self.file_content['/attr/RegWidth'])
+
+ pdict['scandir'] = 'S2N'
+
+ pdict['a_desc'] = "FY-4 {} area".format(self.filename_info['observation_type'])
+ pdict['a_name'] = f'{self.filename_info["observation_type"]}_{res}m'
+ pdict['p_id'] = f'FY-4, {res}m'
+
+ proj_dict = {'a': pdict['a'],
+ 'b': pdict['b'],
+ 'lon_0': pdict['ssp_lon'],
+ 'h': pdict['h'],
+ 'proj': 'geos',
+ 'units': 'm',
+ 'sweep': 'y'}
+
+ p = Proj(proj_dict)
+ o1 = (p(p1[0], p1[1])) # Upper left
+ o2 = (p(p2[0], p2[1])) # Upper right
+ o3 = (p(p3[0], p3[1])) # Lower left
+ o4 = (p(p4[0], p4[1])) # Lower right
+
+ deller = res / 2.
+
+ area = get_area_definition(pdict, (o3[0] - deller, o4[1] - deller, o2[0], o1[1]))
+
+ return area
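
GHI derives its extent by projecting the corner lon/lat attributes into the geos projection before handing them to `get_area_definition`. A minimal pyproj sketch of that projection step (ellipsoid, height and longitude are illustrative, not taken from a real file):

    from pyproj import Proj

    proj = Proj({'proj': 'geos', 'a': 6378137.0, 'b': 6356752.3,
                 'h': 35786000.0, 'lon_0': 133.0, 'units': 'm', 'sweep': 'y'})
    # Project one (lon, lat) corner point to satellite-projection metres.
    x, y = proj(120.0, 30.0)
    print(x, y)
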
diff --git a/satpy/readers/ghrsst_l2.py b/satpy/readers/ghrsst_l2.py
new file mode 100644
index 0000000000..dde3ce7a71
--- /dev/null
+++ b/satpy/readers/ghrsst_l2.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 - 2022 Satpy developers
+#
+# This file is part of Satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Reader for the GHRSST level-2 formatted data."""
+
+import os
+import tarfile
+from contextlib import suppress
+from datetime import datetime
+from functools import cached_property
+
+import xarray as xr
+
+from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
+
+
+class GHRSSTL2FileHandler(BaseFileHandler):
+ """File handler for GHRSST L2 netCDF files."""
+
+ def __init__(self, filename, filename_info, filetype_info, engine=None):
+ """Initialize the file handler for GHRSST L2 netCDF data."""
+ super().__init__(filename, filename_info, filetype_info)
+ self._engine = engine
+ self._tarfile = None
+
+ self.filename_info['start_time'] = datetime.strptime(
+ self.nc.start_time, '%Y%m%dT%H%M%SZ')
+ self.filename_info['end_time'] = datetime.strptime(
+ self.nc.stop_time, '%Y%m%dT%H%M%SZ')
+
+ @cached_property
+ def nc(self):
+ """Get the xarray Dataset for the filename."""
+ if os.fspath(self.filename).endswith('tar'):
+ file_obj = self._open_tarfile()
+ else:
+ file_obj = self.filename
+
+ nc = xr.open_dataset(file_obj,
+ decode_cf=True,
+ mask_and_scale=True,
+ engine=self._engine,
+ chunks={'ni': CHUNK_SIZE,
+ 'nj': CHUNK_SIZE})
+
+ return nc.rename({'ni': 'x', 'nj': 'y'})
+
+ def _open_tarfile(self):
+ self._tarfile = tarfile.open(name=self.filename, mode='r')
+ sst_filename = next((name for name in self._tarfile.getnames()
+ if self._is_sst_file(name)))
+ file_obj = self._tarfile.extractfile(sst_filename)
+ return file_obj
+
+ @staticmethod
+ def _is_sst_file(name):
+ """Check if file in the tar archive is a valid SST file."""
+ return name.endswith('nc') and 'GHRSST-SSTskin' in name
+
+ def get_dataset(self, key, info):
+ """Get any available dataset."""
+ stdname = info.get('standard_name')
+ return self.nc[stdname].squeeze()
+
+ @property
+ def start_time(self):
+ """Get start time."""
+ return self.filename_info['start_time']
+
+ @property
+ def end_time(self):
+ """Get end time."""
+ return self.filename_info['end_time']
+
+ @property
+ def sensor(self):
+ """Get the sensor name."""
+ return self.nc.attrs['sensor'].lower()
+
+ def __del__(self):
+ """Close the tarfile object."""
+ with suppress(AttributeError):
+ self._tarfile.close()
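
The handler reads the netCDF member straight out of a .tar delivery without unpacking it to disk. A standalone sketch of that pattern; the archive name is hypothetical and the member filter mirrors `_is_sst_file`:

    import tarfile
    import xarray as xr

    with tarfile.open('sst_delivery.tar') as tar:  # hypothetical archive
        member = next(name for name in tar.getnames()
                      if name.endswith('nc') and 'GHRSST-SSTskin' in name)
        file_obj = tar.extractfile(member)
        ds = xr.open_dataset(file_obj)
        print(ds.attrs.get('sensor'))
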
diff --git a/satpy/readers/ghrsst_l3c_sst.py b/satpy/readers/ghrsst_l3c_sst.py
index 5b8f86f4c6..d35621d341 100644
--- a/satpy/readers/ghrsst_l3c_sst.py
+++ b/satpy/readers/ghrsst_l3c_sst.py
@@ -15,11 +15,12 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-
+# type: ignore
"""An OSISAF SST reader for the netCDF GHRSST format."""
import logging
from datetime import datetime
+
import numpy as np
from satpy.dataset import Dataset
@@ -71,6 +72,18 @@ def get_dataset(self, dataset_id, ds_info, out=None):
out.mask = np.zeros(shape, dtype=bool)
out.data[:] = np.require(self[var_path][0][::-1], dtype=dtype)
+ self._scale_and_mask_data(out, var_path)
+
+ ds_info.update({
+ "units": ds_info.get("units", file_units),
+ "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']),
+ "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']),
+ })
+ ds_info.update(dataset_id.to_dict())
+ cls = ds_info.pop("container", Dataset)
+ return cls(out, **ds_info)
+
+ def _scale_and_mask_data(self, out, var_path):
valid_min = self[var_path + '/attr/valid_min']
valid_max = self[var_path + '/attr/valid_max']
try:
@@ -78,24 +91,13 @@ def get_dataset(self, dataset_id, ds_info, out=None):
scale_offset = self[var_path + '/attr/add_offset']
except KeyError:
scale_factor = scale_offset = None
-
if valid_min is not None and valid_max is not None:
out.mask[:] |= (out.data < valid_min) | (out.data > valid_max)
-
factors = (scale_factor, scale_offset)
if factors[0] != 1 or factors[1] != 0:
out.data[:] *= factors[0]
out.data[:] += factors[1]
- ds_info.update({
- "units": ds_info.get("units", file_units),
- "platform_name": PLATFORM_NAME.get(self['/attr/platform'], self['/attr/platform']),
- "sensor": SENSOR_NAME.get(self['/attr/sensor'], self['/attr/sensor']),
- })
- ds_info.update(dataset_id.to_dict())
- cls = ds_info.pop("container", Dataset)
- return cls(out, **ds_info)
-
def get_lonlats(self, navid, nav_info, lon_out=None, lat_out=None):
"""Load an area."""
lon_key = 'lon'
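
The extracted `_scale_and_mask_data` masks outside the valid range first, then applies the scale/offset in place on the masked array. The arithmetic, as a small numpy sketch with invented coefficients:

    import numpy as np

    out = np.ma.array([100.0, 200.0, 5000.0], mask=np.zeros(3, dtype=bool))
    valid_min, valid_max = 0, 1000
    scale_factor, add_offset = 0.01, 273.15

    out.mask[:] |= (out.data < valid_min) | (out.data > valid_max)
    if scale_factor != 1 or add_offset != 0:
        out.data[:] *= scale_factor
        out.data[:] += add_offset
    print(out)  # [274.15 275.15 --]
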
diff --git a/satpy/readers/glm_l2.py b/satpy/readers/glm_l2.py
index 2ad6e35302..bfb2719b07 100644
--- a/satpy/readers/glm_l2.py
+++ b/satpy/readers/glm_l2.py
@@ -26,15 +26,16 @@
import logging
from datetime import datetime
-from satpy.readers.abi_base import NC_ABI_BASE
-
import numpy as np
+from satpy.readers.abi_base import NC_ABI_BASE
+
logger = logging.getLogger(__name__)
PLATFORM_NAMES = {
'G16': 'GOES-16',
'G17': 'GOES-17',
+ 'G18': 'GOES-18',
}
# class NC_GLM_L2_LCFA(BaseFileHandler): — add this with glmtools
diff --git a/satpy/readers/goes_imager_hrit.py b/satpy/readers/goes_imager_hrit.py
index 8ff3f786a5..05bcc513d7 100644
--- a/satpy/readers/goes_imager_hrit.py
+++ b/satpy/readers/goes_imager_hrit.py
@@ -27,23 +27,24 @@
import logging
from datetime import datetime, timedelta
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
-
-from pyresample import geometry
-from satpy.readers.eum_base import (time_cds_short, recarray2dict)
-from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text,
- annotation_header, base_hdr_map,
- image_data_function)
+from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming
+from satpy.readers.eum_base import recarray2dict, time_cds_short
+from satpy.readers.hrit_base import (
+ HRITFileHandler,
+ ancillary_text,
+ annotation_header,
+ base_hdr_map,
+ image_data_function,
+)
class CalibrationError(Exception):
"""Dummy error-class."""
- pass
-
logger = logging.getLogger('hrit_goes')
@@ -351,6 +352,8 @@ def process_prologue(self):
14: "GOES-14",
15: "GOES-15"}
+SENSOR_NAME = 'goes_imager'
+
class HRITGOESFileHandler(HRITFileHandler):
"""GOES HRIT format reader."""
@@ -384,7 +387,7 @@ def get_dataset(self, key, info):
new_attrs.update(res.attrs)
res.attrs = new_attrs
res.attrs['platform_name'] = self.platform_name
- res.attrs['sensor'] = 'goes_imager'
+ res.attrs['sensor'] = SENSOR_NAME
res.attrs['orbital_parameters'] = {'projection_longitude': self.mda['projection_parameters']['SSP_longitude'],
'projection_latitude': 0.0,
'projection_altitude': ALTITUDE}
@@ -441,45 +444,41 @@ def _calibrate(self, data):
res.attrs['units'] = units.get(unit, unit)
return res
- def get_area_def(self, dsid):
+ def get_area_def(self, dataset_id):
"""Get the area definition of the band."""
- cfac = np.int32(self.mda['cfac'])
- lfac = np.int32(self.mda['lfac'])
- coff = np.float32(self.mda['coff'])
- loff = np.float32(self.mda['loff'])
-
- a = EQUATOR_RADIUS
- b = POLE_RADIUS
- h = ALTITUDE
-
- lon_0 = self.prologue['SubSatLongitude']
-
- nlines = int(self.mda['number_of_lines'])
- ncols = int(self.mda['number_of_columns'])
-
- loff = nlines - loff
-
- area_extent = self.get_area_extent((nlines, ncols),
- (loff, coff),
- (lfac, cfac),
- h)
-
- proj_dict = {'a': float(a),
- 'b': float(b),
- 'lon_0': float(lon_0),
- 'h': float(h),
- 'proj': 'geos',
- 'units': 'm'}
-
- area = geometry.AreaDefinition(
- 'some_area_name',
- "On-the-fly area",
- 'geosmsg',
- proj_dict,
- ncols,
- nlines,
- area_extent)
-
+ proj_dict = self._get_proj_dict(dataset_id)
+ area_extent = get_area_extent(proj_dict)
+ area = get_area_definition(proj_dict, area_extent)
self.area = area
-
return area
+
+ def _get_proj_dict(self, dataset_id):
+ loff = np.float32(self.mda['loff'])
+ nlines = np.int32(self.mda['number_of_lines'])
+ loff = nlines - loff
+ name_dict = get_geos_area_naming({
+ 'platform_name': self.platform_name,
+ 'instrument_name': SENSOR_NAME,
+ # Partial scans are padded to full disk
+ 'service_name': 'FD',
+ 'service_desc': 'Full Disk',
+ 'resolution': dataset_id['resolution']
+ })
+ return {
+ 'a': EQUATOR_RADIUS,
+ 'b': POLE_RADIUS,
+ 'ssp_lon': float(self.prologue['SubSatLongitude']),
+ 'h': ALTITUDE,
+ 'proj': 'geos',
+ 'units': 'm',
+ 'a_name': name_dict['area_id'],
+ 'a_desc': name_dict['description'],
+ 'p_id': '',
+ 'nlines': nlines,
+ 'ncols': np.int32(self.mda['number_of_columns']),
+ 'cfac': np.int32(self.mda['cfac']),
+ 'lfac': np.int32(self.mda['lfac']),
+ 'coff': np.float32(self.mda['coff']),
+ 'loff': loff,
+ 'scandir': 'N2S'
+ }
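
With `_get_proj_dict` in place, the area comes out of the shared `_geos_area` helpers exactly as shown in `get_area_def` above. A standalone sketch using the same keys the patch passes (all numeric values here are illustrative, not real GOES constants):

    from satpy.readers._geos_area import get_area_definition, get_area_extent

    pdict = {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'ssp_lon': -75.0,
             'proj': 'geos', 'units': 'm', 'nlines': 2816, 'ncols': 2816,
             'cfac': 10216334, 'lfac': 10216334, 'coff': 1408.5, 'loff': 1408.5,
             'scandir': 'N2S', 'a_name': 'goes_east_fd',
             'a_desc': 'GOES-East full disk', 'p_id': ''}
    area = get_area_definition(pdict, get_area_extent(pdict))
    print(area)
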
diff --git a/satpy/readers/goes_imager_nc.py b/satpy/readers/goes_imager_nc.py
index bbf591eebc..eb89369de1 100644
--- a/satpy/readers/goes_imager_nc.py
+++ b/satpy/readers/goes_imager_nc.py
@@ -15,41 +15,64 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Reader for GOES 8-15 imager data in netCDF format from NOAA CLASS.
+"""Reader for GOES 8-15 imager data in netCDF format.
-Also handles GOES 15 data in netCDF format reformated by Eumetsat
+Supports netCDF files from both NOAA-CLASS and EUMETSAT.
-GOES Imager netCDF files contain geolocated detector counts. If ordering via
-NOAA CLASS, select 16 bits/pixel. The instrument oversamples the viewed scene
-in E-W direction by a factor of 1.75: IR/VIS pixels are 112/28 urad on a side,
-but the instrument samples every 64/16 urad in E-W direction (see [BOOK-I] and
-[BOOK-N]).
-Important note: Some essential information are missing in the netCDF files,
-which might render them inappropriate for certain applications. The unknowns
-are:
- 1. Subsatellite point
- 2. Calibration coefficients
- 3. Detector-scanline assignment, i.e. information about which scanline
- was recorded by which detector
+NOAA-CLASS
+==========
+
+GOES-Imager netCDF files from NOAA-CLASS contain detector counts alongside
+latitude and longitude coordinates.
+
+.. note ::
+ If ordering files via NOAA CLASS, select 16 bits/pixel.
+
+
+.. note ::
+ Some essential information is missing in the netCDF files:
+
+ 1. Subsatellite point
+ 2. Calibration coefficients
+ 3. Detector-scanline assignment, i.e. information about which scanline
+ was recorded by which detector
+
+ Items 1. and 2. are not critical because the images are geo-located and NOAA
+ provides static calibration coefficients (`[VIS]`_, `[IR]`_). The
+ detector-scanline assignment however cannot be reconstructed properly. This
+ is where an approximation has to be applied (see below).
+
+
+
+Oversampling
+------------
+
+GOES-Imager oversamples the viewed scene in E-W direction by a factor of
+1.75: IR/VIS pixels are 112/28 urad on a side, but the instrument samples
+every 64/16 urad in E-W direction (see `[BOOK-I]`_ and `[BOOK-N]`_). That means
+pixels are actually overlapping on the ground. This cannot be represented
+by a pyresample area definition.
+
+For full disk images it is possible to estimate an area definition with uniform
+sampling where pixels don't overlap. This can be used for resampling and is
+available via ``scene[dataset].attrs["area_def_uni"]``. The pixel size is derived
+from altitude and N-S sampling angle. The area extent is based on the maximum
+scanning angles at the earth's limb.
-Items 1. and 2. are not critical because the images are geo-located and NOAA
-provides static calibration coefficients ([VIS], [IR]). The detector-scanline
-assignment however cannot be reconstructed properly. This is where an
-approximation has to be applied (see below).
Calibration
-===========
+-----------
-Calibration is performed according to [VIS] and [IR], but with an average
+Calibration is performed according to `[VIS]`_ and `[IR]`_, but with an average
calibration coefficient applied to all detectors in a certain channel. The
reason for and impact of this approximation is described below.
The GOES imager simultaneously records multiple scanlines per sweep using
multiple detectors per channel. The VIS channel has 8 detectors, the IR
channels have 1-2 detectors (see e.g. Figures 3-5a/b, 3-6a/b and 3-7/a-b in
-[BOOK-N]). Each detector has its own calibration coefficients, so in order to
+`[BOOK-N]`_). Each detector has its own calibration coefficients, so in order to
perform an accurate calibration, the detector-scanline assignment is needed.
In theory it is known which scanline was recorded by which detector
@@ -57,7 +80,7 @@
mounted flexes due to thermal gradients in the instrument which leads to a N-S
shift of +/- 8 visible or +/- 2 IR pixels. This shift is compensated in the
GVAR scan formation process, but in a way which is hard to reconstruct
-properly afterwards. See [GVAR], section 3.2.1. for details.
+properly afterwards. See `[GVAR]`_, section 3.2.1. for details.
Since the calibration coefficients of the detectors in a certain channel only
differ slightly, a workaround is to calibrate each scanline with the average
@@ -162,19 +185,12 @@
13_3 0.008 K
======= ===== ====
-References:
-- [GVAR] https://goes.gsfc.nasa.gov/text/GVARRDL98.pdf
-- [BOOK-N] https://goes.gsfc.nasa.gov/text/GOES-N_Databook/databook.pdf
-- [BOOK-I] https://goes.gsfc.nasa.gov/text/databook/databook.pdf
-- [IR] https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html
-- [VIS] https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html
-- [FAQ] https://www.ncdc.noaa.gov/sites/default/files/attachments/Satellite-Frequently-Asked-Questions_2.pdf
-- [SCHED-W] http://www.ospo.noaa.gov/Operations/GOES/west/imager-routine.html
-- [SCHED-E] http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html
-Eumetsat formatted netCDF data:
+EUMETSAT
+========
-The main differences are:
+During tandem operations of GOES-15 and GOES-17, EUMETSAT distributed a
+variant of this dataset with the following differences:
1. The geolocation is in a separate file, used for all bands
2. VIS data is calibrated to Albedo (or reflectance)
@@ -183,22 +199,42 @@
5. File name differs also slightly
6. Data is received via EumetCast
+
+References
+==========
+
+- `[GVAR]`_ GVAR transmission format
+- `[BOOK-N]`_ GOES-N databook
+- `[BOOK-I]`_ GOES-I databook (broken)
+- `[IR]`_ Conversion of GVAR Infrared Data to Scene Radiance or Temperature
+- `[VIS]`_ Calibration of the Visible Channels of the GOES Imagers and Sounders
+- `[GLOSSARY]`_ GVAR_IMG Glossary
+- `[SCHED-W]`_ GOES-15 Routine Imager Schedule
+- `[SCHED-E]`_ Optimized GOES-East Routine Imager Schedule
+
+
+.. _[GVAR]: https://noaasis.noaa.gov/NOAASIS/pubs/nesdis82.PDF
+.. _[BOOK-N]: https://www.nasa.gov/pdf/148080main_GOES-N%20Databook%20with%20Copyright.pdf
+.. _[BOOK-I]: https://goes.gsfc.nasa.gov/text/databook/databook.pdf
+.. _[IR]: https://www.ospo.noaa.gov/Operations/GOES/calibration/gvar-conversion.html
+.. _[VIS]: https://www.ospo.noaa.gov/Operations/GOES/calibration/goes-vis-ch-calibration.html
+.. _[GLOSSARY]: https://www.avl.class.noaa.gov/release/glossary/GVAR_IMG.htm
+.. _[SCHED-W]: https://www.ospo.noaa.gov/Operations/GOES/15/imager-routine.html
+.. _[SCHED-E]: http://www.ospo.noaa.gov/Operations/GOES/east/imager-routine.html
"""
-from abc import abstractmethod
-from collections import namedtuple
-from datetime import datetime, timedelta
import logging
import re
+from abc import abstractmethod
+from datetime import datetime, timedelta
import numpy as np
+import pyresample.geometry
import xarray as xr
-import pyresample.geometry
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
-from satpy.readers.goes_imager_hrit import (SPACECRAFTS, EQUATOR_RADIUS, POLE_RADIUS,
- ALTITUDE)
+from satpy.readers.goes_imager_hrit import ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, SPACECRAFTS
from satpy.readers.utils import bbox, get_geostationary_angle_extent
logger = logging.getLogger(__name__)
@@ -566,6 +602,8 @@
class GOESNCBaseFileHandler(BaseFileHandler):
"""File handler for GOES Imager data in netCDF format."""
+ yaw_flip_sampling_distance = 10
+
def __init__(self, filename, filename_info, filetype_info, geo_data=None):
"""Initialize the reader."""
super(GOESNCBaseFileHandler, self).__init__(filename, filename_info,
@@ -620,7 +658,7 @@ def _get_platform_name(ncattr):
def _get_sector(self, channel, nlines, ncols):
"""Determine which sector was scanned."""
- if self._is_vis(channel):
+ if is_vis_channel(channel):
margin = 100
sectors_ref = self.vis_sectors
else:
@@ -634,16 +672,6 @@ def _get_sector(self, channel, nlines, ncols):
return UNKNOWN_SECTOR
- @staticmethod
- def _is_vis(channel):
- """Determine whether the given channel is a visible channel."""
- if isinstance(channel, str):
- return channel == '00_7'
- elif isinstance(channel, int):
- return channel == 1
- else:
- raise ValueError('Invalid channel')
-
@staticmethod
def _get_earth_mask(lat):
"""Identify earth/space pixels.
@@ -679,65 +707,31 @@ def _get_nadir_pixel(earth_mask, sector):
return None, None
- @staticmethod
- def _is_yaw_flip(lat, delta=10):
+ def _is_yaw_flip(self, lat):
"""Determine whether the satellite is yaw-flipped ('upside down')."""
logger.debug('Computing yaw flip flag')
# In case of yaw-flip the data and coordinates in the netCDF files are
# also flipped. Just check whether the latitude increases or decreases
# with the line number.
+ delta = self.yaw_flip_sampling_distance
crow, ccol = np.array(lat.shape) // 2
return (lat[crow+delta, ccol] - lat[crow, ccol]).values > 0
def _get_area_def_uniform_sampling(self, lon0, channel):
"""Get area definition with uniform sampling."""
logger.debug('Computing area definition')
-
if lon0 is not None:
- # Define proj4 projection parameters
- proj_dict = {'a': EQUATOR_RADIUS,
- 'b': POLE_RADIUS,
- 'lon_0': lon0,
- 'h': ALTITUDE,
- 'proj': 'geos',
- 'units': 'm'}
-
- # Calculate maximum scanning angles
- xmax, ymax = get_geostationary_angle_extent(
- namedtuple('area', ['proj_dict'])(proj_dict))
-
- # Derive area extent using small angle approximation (maximum
- # scanning angle is ~8.6 degrees)
- llx, lly, urx, ury = ALTITUDE * np.array([-xmax, -ymax, xmax, ymax])
- area_extent = [llx, lly, urx, ury]
-
- # Original image is oversampled. Create pyresample area definition
- # with uniform sampling in N-S and E-W direction
- if self._is_vis(channel):
- sampling = SAMPLING_NS_VIS
- else:
- sampling = SAMPLING_NS_IR
- pix_size = ALTITUDE * sampling
- area_def = pyresample.geometry.AreaDefinition(
- 'goes_geos_uniform',
- '{} geostationary projection (uniform sampling)'.format(self.platform_name),
- 'goes_geos_uniform',
- proj_dict,
- np.rint((urx - llx) / pix_size).astype(int),
- np.rint((ury - lly) / pix_size).astype(int),
- area_extent)
-
- return area_def
- else:
- return None
+ est = AreaDefEstimator(self.platform_name, channel)
+ return est.get_area_def_with_uniform_sampling(lon0)
+ return None
@property
def start_time(self):
"""Start timestamp of the dataset."""
dt = self.nc['time'].dt
- return datetime(year=dt.year, month=dt.month, day=dt.day,
- hour=dt.hour, minute=dt.minute,
- second=dt.second, microsecond=dt.microsecond)
+ return datetime(year=int(dt.year), month=int(dt.month), day=int(dt.day),
+ hour=int(dt.hour), minute=int(dt.minute),
+ second=int(dt.second), microsecond=int(dt.microsecond))
@property
def end_time(self):
@@ -803,7 +797,7 @@ def _counts2radiance(self, counts, coefs, channel):
"""Convert raw detector counts to radiance."""
logger.debug('Converting counts to radiance')
- if self._is_vis(channel):
+ if is_vis_channel(channel):
# Since the scanline-detector assignment is unknown, use the average
# coefficients for all scanlines.
slope = np.array(coefs['slope']).mean()
@@ -816,7 +810,7 @@ def _counts2radiance(self, counts, coefs, channel):
def _calibrate(self, radiance, coefs, channel, calibration):
"""Convert radiance to reflectance or brightness temperature."""
- if self._is_vis(channel):
+ if is_vis_channel(channel):
if not calibration == 'reflectance':
raise ValueError('Cannot calibrate VIS channel to '
'{}'.format(calibration))
@@ -946,10 +940,7 @@ def _update_metadata(self, data, ds_info):
# Attributes only available for full disc images. YAML reader
# doesn't like it if satellite_* is present but None
data.attrs.update(
- {'satellite_longitude': self.meta['lon0'],
- 'satellite_latitude': self.meta['lat0'],
- 'satellite_altitude': ALTITUDE,
- 'nadir_row': self.meta['nadir_row'],
+ {'nadir_row': self.meta['nadir_row'],
'nadir_col': self.meta['nadir_col'],
'area_def_uniform_sampling': self.meta['area_def_uni']}
)
@@ -993,6 +984,15 @@ def available_datasets(self, configured_datasets=None):
yield is_avail, ds_info
+def is_vis_channel(channel):
+ """Determine whether the given channel is a visible channel."""
+ if isinstance(channel, str):
+ return channel == '00_7'
+ if isinstance(channel, int):
+ return channel == 1
+ raise ValueError('Invalid channel')
+
+
class GOESNCFileHandler(GOESNCBaseFileHandler):
"""File handler for GOES Imager data in netCDF format."""
@@ -1034,14 +1034,14 @@ def get_dataset(self, key, info):
def calibrate(self, counts, calibration, channel):
"""Perform calibration."""
# Convert 16bit counts from netCDF4 file to the original 10bit
- # GVAR counts by dividing by 32. See [FAQ].
+ # GVAR counts by dividing by 32. See [GLOSSARY].
counts = counts / 32.
coefs = CALIB_COEFS[self.platform_name][channel]
if calibration == 'counts':
return counts
- elif calibration in ['radiance', 'reflectance',
- 'brightness_temperature']:
+ if calibration in ['radiance', 'reflectance',
+ 'brightness_temperature']:
radiance = self._counts2radiance(counts=counts, coefs=coefs,
channel=channel)
if calibration == 'radiance':
@@ -1049,9 +1049,8 @@ def calibrate(self, counts, calibration, channel):
return self._calibrate(radiance=radiance, coefs=coefs,
channel=channel, calibration=calibration)
- else:
- raise ValueError('Unsupported calibration for channel {}: {}'
- .format(channel, calibration))
+
+ raise ValueError('Unsupported calibration for channel {}: {}'.format(channel, calibration))
class GOESEUMNCFileHandler(GOESNCBaseFileHandler):
@@ -1094,19 +1093,19 @@ def get_dataset(self, key, info):
def calibrate(self, data, calibration, channel):
"""Perform calibration."""
coefs = CALIB_COEFS[self.platform_name][channel]
- is_vis = self._is_vis(channel)
+ is_vis = is_vis_channel(channel)
# IR files provide radiances, VIS file provides reflectances
if is_vis and calibration == 'reflectance':
return data
- elif not is_vis and calibration == 'radiance':
+ if not is_vis and calibration == 'radiance':
return data
- elif not is_vis and calibration == 'brightness_temperature':
+ if not is_vis and calibration == 'brightness_temperature':
return self._calibrate(radiance=data, calibration=calibration,
coefs=coefs, channel=channel)
- else:
- raise ValueError('Unsupported calibration for channel {}: {}'
- .format(channel, calibration))
+
+ raise ValueError('Unsupported calibration for channel {}: {}'
+ .format(channel, calibration))
class GOESEUMGEONCFileHandler(BaseFileHandler):
@@ -1212,8 +1211,7 @@ def _load_url_or_file(self, url):
response = requests.get(url)
if response.ok:
return response.text
- else:
- raise requests.HTTPError
+ raise requests.HTTPError
except (MissingSchema, requests.HTTPError):
# Not a valid URL, is it a file?
try:
@@ -1298,7 +1296,7 @@ def _get_table(self, root, heading, heading_type, ):
if not headings:
raise ValueError('Cannot find a coefficient table matching text '
'"{}"'.format(heading))
- elif len(headings) > 1:
+ if len(headings) > 1:
raise ValueError('Found multiple headings matching text "{}"'
.format(heading))
table = headings[0].next_sibling.next_sibling
@@ -1360,3 +1358,83 @@ def test_coefs(ir_url, vis_url):
logger.info('Coefficients OK')
return True
+
+
+class AreaDefEstimator:
+ """Estimate area definition for GOES-Imager."""
+
+ def __init__(self, platform_name, channel):
+ """Create the instance."""
+ self.platform_name = platform_name
+ self.channel = channel
+
+ def get_area_def_with_uniform_sampling(self, projection_longitude):
+ """Get area definition with uniform sampling.
+
+ The area definition is based on geometry and instrument properties:
+ Pixel size is derived from altitude and N-S sampling angle. Area extent
+ is based on the maximum scanning angles at the limb of the earth.
+ """
+ projection = self._get_projection(projection_longitude)
+ area_extent = self._get_area_extent_at_max_scan_angle(projection)
+ shape = self._get_shape_with_uniform_pixel_size(area_extent)
+ return self._create_area_def(projection, area_extent, shape)
+
+ def _get_projection(self, projection_longitude):
+ return {
+ 'a': EQUATOR_RADIUS,
+ 'b': POLE_RADIUS,
+ 'lon_0': projection_longitude,
+ 'h': ALTITUDE,
+ 'proj': 'geos',
+ 'units': 'm'
+ }
+
+ def _get_area_extent_at_max_scan_angle(self, proj_dict):
+ xmax, ymax = self._get_max_scan_angle(proj_dict)
+ return ALTITUDE * np.array([-xmax, -ymax, xmax, ymax])
+
+ def _get_max_scan_angle(self, proj_dict):
+ dummy_area = pyresample.geometry.AreaDefinition(
+ area_id='dummy',
+ proj_id='dummy',
+ description='dummy',
+ projection=proj_dict,
+ width=2,
+ height=2,
+ area_extent=[-1, -1, 1, 1]
+ ) # only projection is relevant here
+ xmax, ymax = get_geostationary_angle_extent(dummy_area)
+ return xmax, ymax
+
+ def _get_shape_with_uniform_pixel_size(self, area_extent):
+ llx, lly, urx, ury = area_extent
+ pix_size = self._get_uniform_pixel_size()
+ width = np.rint((urx - llx) / pix_size).astype(int)
+ height = np.rint((ury - lly) / pix_size).astype(int)
+ return width, height
+
+ def _get_uniform_pixel_size(self):
+ if is_vis_channel(self.channel):
+ sampling = SAMPLING_NS_VIS
+ else:
+ sampling = SAMPLING_NS_IR
+ pix_size = ALTITUDE * sampling
+ return pix_size
+
+ def _create_area_def(self, projection, area_extent, shape):
+ width, height = shape
+ return pyresample.geometry.AreaDefinition(
+ area_id='goes_geos_uniform',
+ proj_id='goes_geos_uniform',
+ description=self._get_area_description(),
+ projection=projection,
+ width=width,
+ height=height,
+ area_extent=area_extent
+ )
+
+ def _get_area_description(self):
+ return '{} geostationary projection (uniform sampling)'.format(
+ self.platform_name
+ )
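
The estimator's numbers follow directly from the module docstring: pixel size is altitude times the N-S sampling angle, and the extent spans the maximum scan angle in every direction. A short worked sketch (the 8.6 degree scan angle is the rough figure quoted in the docstring; constants are approximate):

    import numpy as np

    ALTITUDE = 35786023.0    # m, approximate geostationary altitude
    SAMPLING_NS_IR = 112e-6  # rad, IR N-S sampling angle (112 urad per pixel)

    pix_size = ALTITUDE * SAMPLING_NS_IR                 # ~4008 m per IR pixel
    xmax = ymax = np.deg2rad(8.6)                        # rough max scan angle
    llx, lly, urx, ury = ALTITUDE * np.array([-xmax, -ymax, xmax, ymax])
    width = int(np.rint((urx - llx) / pix_size))
    print(round(pix_size), width)                        # ~4008, ~2680
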
diff --git a/satpy/readers/gpm_imerg.py b/satpy/readers/gpm_imerg.py
index fb08d354ef..3a68f8a9bb 100644
--- a/satpy/readers/gpm_imerg.py
+++ b/satpy/readers/gpm_imerg.py
@@ -25,10 +25,13 @@
import logging
from datetime import datetime
+
+import dask.array as da
+import h5py
import numpy as np
-from satpy.readers.hdf5_utils import HDF5FileHandler
from pyresample.geometry import AreaDefinition
-import h5py
+
+from satpy.readers.hdf5_utils import HDF5FileHandler
logger = logging.getLogger(__name__)
@@ -67,18 +70,21 @@ def get_dataset(self, dataset_id, ds_info):
"""Load a dataset."""
file_key = ds_info.get('file_key', dataset_id['name'])
dsname = 'Grid/' + file_key
- data = self[dsname].squeeze().transpose()
- data.values = np.flipud(data.values)
+ data = self.get(dsname)
+ data = data.squeeze().transpose()
+ if data.ndim >= 2:
+ data = data.rename({data.dims[-2]: 'y', data.dims[-1]: 'x'})
+ data.data = da.flip(data.data, axis=0)
fill = data.attrs['_FillValue']
- pts = (data.values == fill).nonzero()
- data.values[pts] = np.nan
+ data = data.where(data != fill)
for key in list(data.attrs.keys()):
val = data.attrs[key]
if isinstance(val, h5py.h5r.Reference):
del data.attrs[key]
-
+ if isinstance(val, np.ndarray) and isinstance(val[0][0], h5py.h5r.Reference):
+ del data.attrs[key]
return data
def get_area_def(self, dsid):
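
The IMERG rewrite keeps both the row flip and the fill masking lazy. A standalone xarray/dask sketch of the same two operations (values invented):

    import dask.array as da
    import numpy as np
    import xarray as xr

    fill = -9999.9
    arr = xr.DataArray(da.from_array(np.array([[1.0, fill], [3.0, 4.0]]), chunks=1),
                       dims=('y', 'x'), attrs={'_FillValue': fill})
    arr.data = da.flip(arr.data, axis=0)             # lazy replacement for np.flipud
    arr = arr.where(arr != arr.attrs['_FillValue'])  # fill values become NaN
    print(arr.compute())
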
diff --git a/satpy/readers/grib.py b/satpy/readers/grib.py
index 35957b207b..27d9f3bf6d 100644
--- a/satpy/readers/grib.py
+++ b/satpy/readers/grib.py
@@ -23,17 +23,18 @@
"""
import logging
+from datetime import datetime
+
+import dask.array as da
import numpy as np
+import pygrib
import xarray as xr
-import dask.array as da
from pyproj import Proj
from pyresample import geometry
-from datetime import datetime
from satpy import CHUNK_SIZE
-from satpy.readers.file_handlers import BaseFileHandler
from satpy.dataset import DataQuery
-import pygrib
+from satpy.readers.file_handlers import BaseFileHandler
LOG = logging.getLogger(__name__)
@@ -101,9 +102,9 @@ def _create_dataset_ids(self, keys):
self._msg_datasets[msg_id] = ds_info
@staticmethod
- def _convert_datetime(msg, date_key, time_key, format="%Y%m%d%H%M"):
+ def _convert_datetime(msg, date_key, time_key, date_format="%Y%m%d%H%M"):
date_str = "{:d}{:04d}".format(msg[date_key], msg[time_key])
- return datetime.strptime(date_str, format)
+ return datetime.strptime(date_str, date_format)
@property
def start_time(self):
diff --git a/satpy/readers/hdf4_utils.py b/satpy/readers/hdf4_utils.py
index 5dd4982969..7cc226ef0f 100644
--- a/satpy/readers/hdf4_utils.py
+++ b/satpy/readers/hdf4_utils.py
@@ -19,10 +19,10 @@
import logging
-from pyhdf.SD import SD, SDC, SDS
import dask.array as da
-import xarray as xr
import numpy as np
+import xarray as xr
+from pyhdf.SD import SD, SDC, SDS
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
diff --git a/satpy/readers/hdf5_utils.py b/satpy/readers/hdf5_utils.py
index 820f90c4de..13a3a3e36f 100644
--- a/satpy/readers/hdf5_utils.py
+++ b/satpy/readers/hdf5_utils.py
@@ -18,14 +18,15 @@
"""Helpers for reading hdf5-based files."""
import logging
+
+import dask.array as da
import h5py
import numpy as np
import xarray as xr
-import dask.array as da
+from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
from satpy.readers.utils import np2str
-from satpy import CHUNK_SIZE
LOG = logging.getLogger(__name__)
diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py
index 7dd93073fe..3f9e62bf9b 100644
--- a/satpy/readers/hdfeos_base.py
+++ b/satpy/readers/hdfeos_base.py
@@ -17,17 +17,20 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Base HDF-EOS reader."""
-import re
-import logging
+from __future__ import annotations
+import logging
+import re
+from ast import literal_eval
+from contextlib import suppress
from datetime import datetime
-import xarray as xr
-import numpy as np
+import numpy as np
+import xarray as xr
from pyhdf.error import HDF4Error
from pyhdf.SD import SD
-from satpy import CHUNK_SIZE
+from satpy import CHUNK_SIZE, DataID
from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
@@ -35,14 +38,45 @@
def interpolate(clons, clats, csatz, src_resolution, dst_resolution):
"""Interpolate two parallel datasets jointly."""
+ if csatz is None:
+ return _interpolate_no_angles(clons, clats, src_resolution, dst_resolution)
+ return _interpolate_with_angles(clons, clats, csatz, src_resolution, dst_resolution)
+
+
+def _interpolate_with_angles(clons, clats, csatz, src_resolution, dst_resolution):
from geotiepoints.modisinterpolator import modis_1km_to_250m, modis_1km_to_500m, modis_5km_to_1km
+ # (src_res, dst_res) -> interpolation function
interpolation_functions = {
(5000, 1000): modis_5km_to_1km,
(1000, 500): modis_1km_to_500m,
(1000, 250): modis_1km_to_250m
}
+ return _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution,
+ (clons, clats, csatz))
+
+def _interpolate_no_angles(clons, clats, src_resolution, dst_resolution):
+ interpolation_functions = {}
+
+ try:
+ from geotiepoints.simple_modis_interpolator import modis_1km_to_250m as simple_1km_to_250m
+ from geotiepoints.simple_modis_interpolator import modis_1km_to_500m as simple_1km_to_500m
+ except ImportError:
+ raise NotImplementedError(
+ f"Interpolation from {src_resolution}m to {dst_resolution}m "
+ "without satellite zenith angle information is not "
+ "implemented. Try updating your version of "
+ "python-geotiepoints.")
+ else:
+ interpolation_functions[(1000, 500)] = simple_1km_to_500m
+ interpolation_functions[(1000, 250)] = simple_1km_to_250m
+
+ return _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution,
+ (clons, clats))
+
+
+def _find_and_run_interpolation(interpolation_functions, src_resolution, dst_resolution, args):
try:
interpolation_function = interpolation_functions[(src_resolution, dst_resolution)]
except KeyError:
@@ -51,14 +85,13 @@ def interpolate(clons, clats, csatz, src_resolution, dst_resolution):
raise NotImplementedError(error_message)
logger.debug("Interpolating from {} to {}".format(src_resolution, dst_resolution))
-
- return interpolation_function(clons, clats, csatz)
+ return interpolation_function(*args)
class HDFEOSBaseFileReader(BaseFileHandler):
"""Base file handler for HDF EOS data for both L1b and L2 products."""
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, **kwargs):
"""Initialize the base reader."""
BaseFileHandler.__init__(self, filename, filename_info, filetype_info)
try:
@@ -67,72 +100,106 @@ def __init__(self, filename, filename_info, filetype_info):
error_message = "Could not load data from file {}: {}".format(self.filename, err)
raise ValueError(error_message)
- # Read metadata
- self.metadata = self.read_mda(self.sd.attributes()['CoreMetadata.0'])
- self.metadata.update(self.read_mda(
- self.sd.attributes()['StructMetadata.0'])
- )
- self.metadata.update(self.read_mda(
- self.sd.attributes()['ArchiveMetadata.0'])
- )
+ self.metadata = self._load_all_metadata_attributes()
- @staticmethod
- def read_mda(attribute):
+ def _load_all_metadata_attributes(self):
+ metadata = {}
+ attrs = self.sd.attributes()
+ for md_key in ("CoreMetadata.0", "StructMetadata.0", "ArchiveMetadata.0"):
+ try:
+ str_val = attrs[md_key]
+ except KeyError:
+ continue
+ else:
+ metadata.update(self.read_mda(str_val))
+ return metadata
+
+ @classmethod
+ def read_mda(cls, attribute):
"""Read the EOS metadata."""
- lines = attribute.split('\n')
- mda = {}
- current_dict = mda
- path = []
- prev_line = None
+ line_iterator = iter(attribute.split('\n'))
+ return cls._read_mda(line_iterator)
+
+ @classmethod
+ def _read_mda(cls, lines, element=None):
+ current_dict = {}
+
for line in lines:
if not line:
continue
if line == 'END':
- break
- if prev_line:
- line = prev_line + line
- key, val = line.split('=')
- key = key.strip()
- val = val.strip()
- try:
- val = eval(val)
- except NameError:
- pass
- except SyntaxError:
- prev_line = line
- continue
- prev_line = None
+ return current_dict
+
+ key, val = cls._split_line(line, lines)
+
if key in ['GROUP', 'OBJECT']:
- new_dict = {}
- path.append(val)
- current_dict[val] = new_dict
- current_dict = new_dict
+ current_dict[val] = cls._read_mda(lines, val)
elif key in ['END_GROUP', 'END_OBJECT']:
- if val != path[-1]:
- raise SyntaxError
- path = path[:-1]
- current_dict = mda
- for item in path:
- current_dict = current_dict[item]
+ if val != element:
+ raise SyntaxError("Non-matching end-tag")
+ return current_dict
elif key in ['CLASS', 'NUM_VAL']:
pass
else:
current_dict[key] = val
- return mda
+ logger.warning("Malformed EOS metadata, missing an END.")
+ return current_dict
+
+ @classmethod
+ def _split_line(cls, line, lines):
+ key, val = line.split('=')
+ key = key.strip()
+ val = val.strip()
+ try:
+ with suppress(ValueError):
+ val = literal_eval(val)
+ except SyntaxError:
+ key, val = cls._split_line(line + next(lines), lines)
+ return key, val
+
+ @property
+ def metadata_platform_name(self):
+ """Platform name from the internal file metadata."""
+ try:
+ # Example: 'Terra' or 'Aqua'
+ return self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][
+ 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE']
+ except KeyError:
+ return self._platform_name_from_filename()
+
+ def _platform_name_from_filename(self):
+ platform_indicator = self.filename_info["platform_indicator"]
+ if platform_indicator in ("t", "O"):
+ # t1.* or MOD*
+ return "Terra"
+ # a1.* or MYD*
+ return "Aqua"
@property
def start_time(self):
"""Get the start time of the dataset."""
- date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' +
- self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE'])
- return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f')
+ try:
+ date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGDATE']['VALUE'] + ' ' +
+ self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEBEGINNINGTIME']['VALUE'])
+ return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f')
+ except KeyError:
+ return self._start_time_from_filename()
+
+ def _start_time_from_filename(self):
+ for fn_key in ("start_time", "acquisition_time"):
+ if fn_key in self.filename_info:
+ return self.filename_info[fn_key]
+ raise RuntimeError("Could not determine file start time")
@property
def end_time(self):
"""Get the end time of the dataset."""
- date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' +
- self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE'])
- return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f')
+ try:
+ date = (self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGDATE']['VALUE'] + ' ' +
+ self.metadata['INVENTORYMETADATA']['RANGEDATETIME']['RANGEENDINGTIME']['VALUE'])
+ return datetime.strptime(date, '%Y-%m-%d %H:%M:%S.%f')
+ except KeyError:
+ return self.start_time
def _read_dataset_in_file(self, dataset_name):
if dataset_name not in self.sd.datasets():
@@ -144,32 +211,82 @@ def _read_dataset_in_file(self, dataset_name):
dataset = self.sd.select(dataset_name)
return dataset
- def load_dataset(self, dataset_name):
+ def load_dataset(self, dataset_name, is_category=False):
"""Load the dataset from HDF EOS file."""
from satpy.readers.hdf4_utils import from_sds
dataset = self._read_dataset_in_file(dataset_name)
- fill_value = dataset._FillValue
dask_arr = from_sds(dataset, chunks=CHUNK_SIZE)
dims = ('y', 'x') if dask_arr.ndim == 2 else None
data = xr.DataArray(dask_arr, dims=dims,
attrs=dataset.attributes())
+ data = self._scale_and_mask_data_array(data, is_category=is_category)
- # preserve integer data types if possible
- if np.issubdtype(data.dtype, np.integer):
- new_fill = fill_value
- else:
- new_fill = np.nan
- data.attrs.pop('_FillValue', None)
- good_mask = data != fill_value
+ return data
+
+ def _scale_and_mask_data_array(self, data, is_category=False):
+ """Unscale byte data and mask invalid/fill values.
- scale_factor = data.attrs.get('scale_factor')
- if scale_factor is not None:
- data = data * scale_factor
+ MODIS requires unscaling the in-file bytes in an unexpected way::
- data = data.where(good_mask, new_fill)
+ data = (byte_value - add_offset) * scale_factor
+
+    See Appendix C of the L1B User's Guide for more information:
+
+ https://mcst.gsfc.nasa.gov/sites/default/files/file_attachments/M1054E_PUG_2017_0901_V6.2.2_Terra_V6.2.1_Aqua.pdf
+
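+    As a purely illustrative example (made-up numbers): a byte value of 1000
+    with ``add_offset = 316.97`` and ``scale_factor = 0.01`` becomes
+    ``(1000 - 316.97) * 0.01``, approximately ``6.83``.
+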
+ """
+ good_mask, new_fill = self._get_good_data_mask(data, is_category=is_category)
+ scale_factor = data.attrs.pop('scale_factor', None)
+ add_offset = data.attrs.pop('add_offset', None)
+        # don't scale category products; for other products, apply the scaling
+        # even when scale_factor equals 1 so that integers become floats
+ if scale_factor is not None and not is_category:
+ if add_offset is not None and add_offset != 0:
+ data = data - np.float32(add_offset)
+ data = data * np.float32(scale_factor)
+
+ if good_mask is not None:
+ data = data.where(good_mask, new_fill)
return data
+ def _get_good_data_mask(self, data_arr, is_category=False):
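+        """Get a mask of good (non-fill) pixels and the fill value to use, or ``(None, None)``."""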
+ try:
+ fill_value = data_arr.attrs["_FillValue"]
+ except KeyError:
+ return None, None
+
+ # preserve integer data types if possible
+ if is_category and np.issubdtype(data_arr.dtype, np.integer):
+ # no need to mask, the fill value is already what it needs to be
+ return None, None
+ new_fill = np.nan
+ data_arr.attrs.pop('_FillValue', None)
+ good_mask = data_arr != fill_value
+ return good_mask, new_fill
+
+ def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray):
+ """Add metadata that is specific to Satpy."""
+ new_attrs = {
+ 'platform_name': 'EOS-' + self.metadata_platform_name,
+ 'sensor': 'modis',
+ }
+
+ res = data_id["resolution"]
+ rps = self._resolution_to_rows_per_scan(res)
+ new_attrs["rows_per_scan"] = rps
+
+ data_arr.attrs.update(new_attrs)
+
+ def _resolution_to_rows_per_scan(self, resolution: int) -> int:
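+        # MODIS scans 10 rows of 1 km pixels per mirror rotation
+        # (20 rows at 500 m, 40 rows at 250 m); 5 km products have 2 rows per scan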
+ known_rps = {
+ 5000: 2,
+ 1000: 10,
+ 500: 20,
+ 250: 40,
+ }
+ return known_rps.get(resolution, 10)
+
class HDFEOSGeoReader(HDFEOSBaseFileReader):
"""Handler for the geographical datasets."""
@@ -185,41 +302,45 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader):
'solar_zenith_angle': ('SolarZenith', 'Solar_Zenith'),
}
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, **kwargs):
"""Initialize the geographical reader."""
- HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info)
+ HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info, **kwargs)
self.cache = {}
@staticmethod
- def read_geo_resolution(metadata):
- """Parse metadata to find the geolocation resolution.
+ def is_geo_loadable_dataset(dataset_name: str) -> bool:
+ """Determine if this dataset should be loaded as a Geo dataset."""
+ return dataset_name in HDFEOSGeoReader.DATASET_NAMES
- It is implemented as a staticmethod to match read_mda pattern.
-
- """
+ @staticmethod
+ def read_geo_resolution(metadata):
+ """Parse metadata to find the geolocation resolution."""
# level 1 files
try:
- ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE']
- if ds.endswith('D03'):
- return 1000
- else:
- # 1km files have 5km geolocation usually
- return 5000
+ return HDFEOSGeoReader._geo_resolution_for_l1b(metadata)
except KeyError:
- pass
+ try:
+ return HDFEOSGeoReader._geo_resolution_for_l2_l1b(metadata)
+ except (AttributeError, KeyError):
+ raise RuntimeError("Could not determine resolution from file metadata")
+ @staticmethod
+ def _geo_resolution_for_l1b(metadata):
+ ds = metadata['INVENTORYMETADATA']['COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE']
+ if ds.endswith('D03') or ds.endswith('HKM') or ds.endswith('QKM'):
+ return 1000
+ # 1km files have 5km geolocation usually
+ return 5000
+
+ @staticmethod
+ def _geo_resolution_for_l2_l1b(metadata):
        # level 2 data files probably have this
# this does not work for L1B 1KM data files because they are listed
# as 1KM data but the geo data inside is at 5km
- try:
- latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension']
-            resolution_regex = re.compile(r'(?P<resolution>\d+)(km|KM)')
- resolution_match = resolution_regex.search(latitude_dim)
- return int(resolution_match.group('resolution')) * 1000
- except (AttributeError, KeyError):
- pass
-
- raise RuntimeError("Could not determine resolution from file metadata")
+ latitude_dim = metadata['SwathStructure']['SWATH_1']['DimensionMap']['DimensionMap_2']['GeoDimension']
+        resolution_regex = re.compile(r'(?P<resolution>\d+)(km|KM)')
+ resolution_match = resolution_regex.search(latitude_dim)
+ return int(resolution_match.group('resolution')) * 1000
@property
def geo_resolution(self):
@@ -236,7 +357,7 @@ def _load_ds_by_name(self, ds_name):
return self.load_dataset(var_names[1])
return self.load_dataset(var_names)
- def get_interpolated_dataset(self, name1, name2, resolution, sensor_zenith, offset=0):
+ def get_interpolated_dataset(self, name1, name2, resolution, offset=0):
"""Load and interpolate datasets."""
try:
result1 = self.cache[(name1, resolution)]
@@ -244,6 +365,12 @@ def get_interpolated_dataset(self, name1, name2, resolution, sensor_zenith, offs
except KeyError:
result1 = self._load_ds_by_name(name1)
result2 = self._load_ds_by_name(name2) - offset
+ try:
+ sensor_zenith = self._load_ds_by_name('satellite_zenith_angle')
+ except KeyError:
+ # no sensor zenith angle, do "simple" interpolation
+ sensor_zenith = None
+
result1, result2 = interpolate(
result1, result2, sensor_zenith,
self.geo_resolution, resolution
@@ -251,14 +378,14 @@ def get_interpolated_dataset(self, name1, name2, resolution, sensor_zenith, offs
self.cache[(name1, resolution)] = result1
self.cache[(name2, resolution)] = result2 + offset
- def get_dataset(self, dataset_keys, dataset_info):
+ def get_dataset(self, dataset_id: DataID, dataset_info: dict) -> xr.DataArray:
"""Get the geolocation dataset."""
# Name of the dataset as it appears in the HDF EOS file
in_file_dataset_name = dataset_info.get('file_key')
# Name of the dataset in the YAML file
- dataset_name = dataset_keys['name']
+ dataset_name = dataset_id['name']
# Resolution asked
- resolution = dataset_keys['resolution']
+ resolution = dataset_id['resolution']
if in_file_dataset_name is not None:
# if the YAML was configured with a specific name use that
data = self.load_dataset(in_file_dataset_name)
@@ -274,24 +401,24 @@ def get_dataset(self, dataset_keys, dataset_info):
"configured".format(dataset_name))
# The data must be interpolated
- sensor_zenith = self._load_ds_by_name('satellite_zenith_angle')
logger.debug("Loading %s", dataset_name)
if dataset_name in ['longitude', 'latitude']:
self.get_interpolated_dataset('longitude', 'latitude',
- resolution, sensor_zenith)
+ resolution)
elif dataset_name in ['satellite_azimuth_angle', 'satellite_zenith_angle']:
            # Sensor dataset names differ between L1b and L2 products
self.get_interpolated_dataset('satellite_azimuth_angle', 'satellite_zenith_angle',
- resolution, sensor_zenith, offset=90)
+ resolution, offset=90)
elif dataset_name in ['solar_azimuth_angle', 'solar_zenith_angle']:
            # Sensor dataset names differ between L1b and L2 products
self.get_interpolated_dataset('solar_azimuth_angle', 'solar_zenith_angle',
- resolution, sensor_zenith, offset=90)
+ resolution, offset=90)
data = self.cache[dataset_name, resolution]
for key in ('standard_name', 'units'):
if key in dataset_info:
data.attrs[key] = dataset_info[key]
+ self._add_satpy_metadata(dataset_id, data)
return data
diff --git a/satpy/readers/hrit_base.py b/satpy/readers/hrit_base.py
index 5740d3e8b0..ae38c18b8e 100644
--- a/satpy/readers/hrit_base.py
+++ b/satpy/readers/hrit_base.py
@@ -29,19 +29,23 @@
"""
import logging
-from datetime import timedelta
-from tempfile import gettempdir
import os
+from contextlib import contextmanager, nullcontext
+from datetime import timedelta
from io import BytesIO
-from subprocess import Popen, PIPE
+from subprocess import PIPE, Popen # nosec B404
+from tempfile import gettempdir
+import dask
+import dask.array as da
import numpy as np
import xarray as xr
-
-import dask.array as da
from pyresample import geometry
-from satpy.readers.file_handlers import BaseFileHandler
+
+import satpy.readers.utils as utils
+from satpy.readers import FSFile
from satpy.readers.eum_base import time_cds_short
+from satpy.readers.file_handlers import BaseFileHandler
from satpy.readers.seviri_base import dec10216
logger = logging.getLogger('hrit_base')
@@ -75,8 +79,6 @@
key_header = np.dtype([('key', '|S1')])
-base_variable_length_headers = {}
-
base_text_headers = {image_data_function: 'image_data_function',
annotation_header: 'annotation_header',
ancillary_text: 'ancillary_text',
@@ -134,7 +136,7 @@ def decompress(infile, outdir='.'):
cwd = os.getcwd()
os.chdir(outdir)
- p = Popen([cmd, infile], stdout=PIPE)
+ p = Popen([cmd, infile], stdout=PIPE) # nosec B603
stdout = BytesIO(p.communicate()[0])
status = p.returncode
os.chdir(cwd)
@@ -150,6 +152,18 @@ def decompress(infile, outdir='.'):
return os.path.join(outdir, outfile.decode('utf-8'))
+def get_header_id(fp):
+ """Return the HRIT header common data."""
+ data = fp.read(common_hdr.itemsize)
+ return np.frombuffer(data, dtype=common_hdr, count=1)[0]
+
+
+def get_header_content(fp, header_dtype, count=1):
+ """Return the content of the HRIT header."""
+ data = fp.read(header_dtype.itemsize * count)
+ return np.frombuffer(data, dtype=header_dtype, count=count)
+
+
class HRITFileHandler(BaseFileHandler):
"""HRIT standard format reader."""
@@ -157,17 +171,10 @@ def __init__(self, filename, filename_info, filetype_info, hdr_info):
"""Initialize the reader."""
super(HRITFileHandler, self).__init__(filename, filename_info,
filetype_info)
- self.mda = {}
- self._get_hd(hdr_info)
- if self.mda.get('compression_flag_for_data'):
- logger.debug('Unpacking %s', filename)
- try:
- self.filename = decompress(filename, gettempdir())
- except IOError as err:
- logger.warning("Unpacking failed: %s", str(err))
- self.mda = {}
- self._get_hd(hdr_info)
+ self.mda = {}
+ self.hdr_info = hdr_info
+ self._get_hd(self.hdr_info)
self._start_time = filename_info['start_time']
self._end_time = self._start_time + timedelta(minutes=15)
@@ -176,17 +183,15 @@ def _get_hd(self, hdr_info):
"""Open the file, read and get the basic file header info and set the mda dictionary."""
hdr_map, variable_length_headers, text_headers = hdr_info
- with open(self.filename) as fp:
+ with utils.generic_open(self.filename, mode='rb') as fp:
total_header_length = 16
while fp.tell() < total_header_length:
- hdr_id = np.fromfile(fp, dtype=common_hdr, count=1)[0]
+ hdr_id = get_header_id(fp)
the_type = hdr_map[hdr_id['hdr_id']]
if the_type in variable_length_headers:
field_length = int((hdr_id['record_length'] - 3) /
the_type.itemsize)
- current_hdr = np.fromfile(fp,
- dtype=the_type,
- count=field_length)
+ current_hdr = get_header_content(fp, the_type, field_length)
key = variable_length_headers[the_type]
if key in self.mda:
if not isinstance(self.mda[key], list):
@@ -199,14 +204,10 @@ def _get_hd(self, hdr_info):
the_type.itemsize)
char = list(the_type.fields.values())[0][0].char
new_type = np.dtype(char + str(field_length))
- current_hdr = np.fromfile(fp,
- dtype=new_type,
- count=1)[0]
+ current_hdr = get_header_content(fp, new_type)[0]
self.mda[text_headers[the_type]] = current_hdr
else:
- current_hdr = np.fromfile(fp,
- dtype=the_type,
- count=1)[0]
+ current_hdr = get_header_content(fp, the_type)[0]
self.mda.update(
dict(zip(current_hdr.dtype.names, current_hdr)))
@@ -221,10 +222,6 @@ def _get_hd(self, hdr_info):
'SSP_longitude': 0.0}
self.mda['orbital_parameters'] = {}
- def get_shape(self, dsid, ds_info):
- """Get shape."""
- return int(self.mda['number_of_lines']), int(self.mda['number_of_columns'])
-
@property
def start_time(self):
"""Get start time."""
@@ -314,20 +311,101 @@ def get_area_def(self, dsid):
def read_band(self, key, info):
"""Read the data."""
- shape = int(np.ceil(self.mda['data_field_length'] / 8.))
- if self.mda['number_of_bits_per_pixel'] == 16:
- dtype = '>u2'
- shape //= 2
- elif self.mda['number_of_bits_per_pixel'] in [8, 10]:
- dtype = np.uint8
- shape = (shape, )
- data = np.memmap(self.filename, mode='r',
- offset=self.mda['total_header_length'],
- dtype=dtype,
- shape=shape)
- data = da.from_array(data, chunks=shape[0])
- if self.mda['number_of_bits_per_pixel'] == 10:
+ output_dtype, output_shape = self._get_output_info()
+ return da.from_delayed(_read_data(self.filename, self.mda),
+ shape=output_shape,
+ dtype=output_dtype)
+
+ def _get_output_info(self):
+ bpp = self.mda['number_of_bits_per_pixel']
+ if bpp in [10, 16]:
+ output_dtype = np.uint16
+ elif bpp == 8:
+ output_dtype = np.uint8
+ else:
+ raise ValueError(f"Unexpected number of bits per pixel: {bpp}")
+ output_shape = (self.mda['number_of_lines'], self.mda['number_of_columns'])
+ return output_dtype, output_shape
+
+
+@dask.delayed
+def _read_data(filename, mda):
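+    """Read the data of one HRIT segment, delayed so it runs lazily under dask."""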
+ return HRITSegment(filename, mda).read_data()
+
+
+@contextmanager
+def decompressed(filename):
+ """Decompress context manager."""
+ try:
+ new_filename = decompress(filename, gettempdir())
+ except IOError as err:
+ logger.error("Unpacking failed: %s", str(err))
+ raise
+ yield new_filename
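+    # remove the temporary decompressed file once the caller is done with it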
+ os.remove(new_filename)
+
+
+class HRITSegment:
+ """An HRIT segment with data."""
+
+ def __init__(self, filename, mda):
+ """Set up the segment."""
+ self.filename = filename
+ self.mda = mda
+ self.lines = mda['number_of_lines']
+ self.cols = mda['number_of_columns']
+ self.bpp = mda['number_of_bits_per_pixel']
+ self.compressed = mda['compression_flag_for_data'] == 1
+ self.offset = mda['total_header_length']
+ self.zipped = os.fspath(filename).endswith('.bz2')
+
+ def read_data(self):
+ """Read the data."""
+ data = self._read_data_from_file()
+ if self.bpp == 10:
data = dec10216(data)
- data = data.reshape((self.mda['number_of_lines'],
- self.mda['number_of_columns']))
+ data = data.reshape((self.lines, self.cols))
return data
+
+ def _read_data_from_file(self):
+ if self._is_file_like():
+ return self._read_file_like()
+ return self._read_data_from_disk()
+
+ def _is_file_like(self):
+ return isinstance(self.filename, FSFile)
+
+ def _read_data_from_disk(self):
+ # For reading the image data, unzip_context is faster than generic_open
+ dtype, shape = self._get_input_info()
+ with utils.unzip_context(self.filename) as fn:
+ with decompressed(fn) if self.compressed else nullcontext(fn) as filename:
+ return np.fromfile(filename,
+ offset=self.offset,
+ dtype=dtype,
+ count=np.prod(shape))
+
+ def _read_file_like(self):
+ # filename is likely to be a file-like object, already in memory
+ dtype, shape = self._get_input_info()
+ with utils.generic_open(self.filename, mode="rb") as fp:
+ no_elements = np.prod(shape)
+ fp.seek(self.offset)
+ return np.frombuffer(
+ fp.read(np.dtype(dtype).itemsize * no_elements),
+ dtype=np.dtype(dtype),
+ count=no_elements.item()
+ ).reshape(shape)
+
+ def _get_input_info(self):
+ total_bits = int(self.lines) * int(self.cols) * int(self.bpp)
+ input_shape = int(np.ceil(total_bits / 8.))
+ if self.bpp == 16:
+ input_dtype = '>u2'
+ input_shape //= 2
+ elif self.bpp in [8, 10]:
+ input_dtype = np.uint8
+ else:
+ raise ValueError(f"Unexpected number of bits per pixel: {self.bpp}")
+ input_shape = (input_shape,)
+ return input_dtype, input_shape
diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py
index de24f4e6be..4da5e0d12f 100644
--- a/satpy/readers/hrit_jma.py
+++ b/satpy/readers/hrit_jma.py
@@ -61,9 +61,6 @@
* y (y) float64 5.5e+06 5.498e+06 5.496e+06 ... -5.496e+06 -5.498e+06
* x (x) float64 -5.498e+06 -5.496e+06 -5.494e+06 ... 5.498e+06 5.5e+06
Attributes:
- satellite_longitude: 140.7
- satellite_latitude: 0.0
- satellite_altitude: 35785831.0
orbital_parameters: {'projection_longitude': 140.7, 'projection_latitud...
standard_name: toa_brightness_temperature
level: None
@@ -86,6 +83,25 @@
the remaining scanlines are computed using linear interpolation. This is what you'll find in the
``acq_time`` coordinate of the dataset.
+Compression
+-----------
+
+Gzip-compressed MTSAT files can be decompressed on the fly using
+:class:`~satpy.readers.FSFile`:
+
+.. code-block:: python
+
+ import fsspec
+ from satpy import Scene
+ from satpy.readers import FSFile
+
+ filename = "/data/HRIT_MTSAT1_20090101_0630_DK01IR1.gz"
+ open_file = fsspec.open(filename, compression="gzip")
+ fs_file = FSFile(open_file)
+ scn = Scene([fs_file], reader="jami_hrit")
+ scn.load(["IR1"])
+
+
.. _JMA HRIT - Mission Specific Implementation: http://www.jma.go.jp/jma/jma-eng/satellite/introduction/4_2HRIT.pdf
.. _JAMI/Imager sample data: https://www.data.jma.go.jp/mscweb/en/operation/hrit_sample.html
.. _AHI sample data: https://www.data.jma.go.jp/mscweb/en/himawari89/space_segment/sample_hrit.html
@@ -97,10 +113,14 @@
import numpy as np
import xarray as xr
-from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text,
- annotation_header, base_hdr_map,
- image_data_function)
from satpy.readers._geos_area import get_area_definition, get_area_extent
+from satpy.readers.hrit_base import (
+ HRITFileHandler,
+ ancillary_text,
+ annotation_header,
+ base_hdr_map,
+ image_data_function,
+)
from satpy.readers.utils import get_geostationary_mask
logger = logging.getLogger('hrit_jma')
@@ -122,7 +142,7 @@
image_quality_information = np.dtype([('quality', '|S1')])
-jma_variable_length_headers = {}
+jma_variable_length_headers: dict = {}
jma_text_headers = {image_data_function: 'image_data_function',
annotation_header: 'annotation_header',
@@ -184,9 +204,36 @@ def mjd2datetime64(mjd):
class HRITJMAFileHandler(HRITFileHandler):
- """JMA HRIT format reader."""
+ """JMA HRIT format reader.
+
+    By default, the reader uses the start time parsed from the filename. To use the exact time
+    computed from the metadata instead, the user can define a keyword argument::
- def __init__(self, filename, filename_info, filetype_info):
+ scene = Scene(filenames=filenames,
+ reader='ahi_hrit',
+ reader_kwargs={'use_acquisition_time_as_start_time': True})
+
+ As this time is different for every channel, time-dependent calculations like SZA correction
+ can be pretty slow when multiple channels are used.
+
+    The exact scanline times are always available as coordinates of the individual channels::
+
+ scene.load(["B03"])
+        print(scene["B03"].coords["acq_time"].data)
+
+ would print something similar to::
+
+ array(['2021-12-08T06:00:20.131200000', '2021-12-08T06:00:20.191948000',
+ '2021-12-08T06:00:20.252695000', ...,
+ '2021-12-08T06:09:39.449390000', '2021-12-08T06:09:39.510295000',
+ '2021-12-08T06:09:39.571200000'], dtype='datetime64[ns]')
+
+ The first value represents the exact start time, and the last one the exact end time of the data
+ acquisition.
+
+ """
+
+ def __init__(self, filename, filename_info, filetype_info, use_acquisition_time_as_start_time=False):
"""Initialize the reader."""
super(HRITJMAFileHandler, self).__init__(filename, filename_info,
filetype_info,
@@ -194,6 +241,7 @@ def __init__(self, filename, filename_info, filetype_info):
jma_variable_length_headers,
jma_text_headers))
+ self._use_acquisition_time_as_start_time = use_acquisition_time_as_start_time
self.mda['segment_sequence_number'] = self.mda['image_segm_seq_no']
self.mda['planned_end_segment_number'] = self.mda['total_no_image_segm']
self.mda['planned_start_segment_number'] = 1
@@ -349,9 +397,6 @@ def get_dataset(self, key, info):
# Update attributes
res.attrs.update(info)
res.attrs['platform_name'] = self.platform
- res.attrs['satellite_longitude'] = float(self.mda['projection_parameters']['SSP_longitude'])
- res.attrs['satellite_latitude'] = 0.
- res.attrs['satellite_altitude'] = float(self.mda['projection_parameters']['h'])
res.attrs['orbital_parameters'] = {
'projection_longitude': float(self.mda['projection_parameters']['SSP_longitude']),
'projection_latitude': 0.,
@@ -411,14 +456,14 @@ def calibrate(self, data, calibration):
if calibration == 'counts':
return data
- elif calibration == 'radiance':
+ if calibration == 'radiance':
raise NotImplementedError("Can't calibrate to radiance.")
- else:
- cal = self.calibration_table
- res = data.data.map_blocks(self._interp, cal, dtype=cal[:, 0].dtype)
- res = xr.DataArray(res,
- dims=data.dims, attrs=data.attrs,
- coords=data.coords)
+
+ cal = self.calibration_table
+ res = data.data.map_blocks(self._interp, cal, dtype=cal[:, 0].dtype)
+ res = xr.DataArray(res,
+ dims=data.dims, attrs=data.attrs,
+ coords=data.coords)
res = res.where(data < 65535)
logger.debug("Calibration time " + str(datetime.now() - tic))
return res
@@ -426,7 +471,9 @@ def calibrate(self, data, calibration):
@property
def start_time(self):
"""Get start time of the scan."""
- return self.acq_time[0].astype(datetime)
+ if self._use_acquisition_time_as_start_time:
+ return self.acq_time[0].astype(datetime)
+ return self._start_time
@property
def end_time(self):
diff --git a/satpy/readers/hrpt.py b/satpy/readers/hrpt.py
index df23cc5548..cbde23559c 100644
--- a/satpy/readers/hrpt.py
+++ b/satpy/readers/hrpt.py
@@ -41,7 +41,7 @@
from pyorbital.orbital import Orbital
from satpy._compat import cached_property
-from satpy.readers.aapp_l1b import LINE_CHUNK
+from satpy.readers.aapp_l1b import get_avhrr_lac_chunks
from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
@@ -137,6 +137,11 @@ def times(self):
"""Get the timestamps for each line."""
return time_seconds(self._data["timecode"], self.year)
+ @cached_property
+ def _chunks(self):
+ """Get the best chunks for this data."""
+ return get_avhrr_lac_chunks((self._data.shape[0], 2048), float)
+
@cached_property
def _data(self):
"""Get the data."""
@@ -171,7 +176,7 @@ def get_dataset(self, key, info):
def _get_channel_data(self, key):
"""Get channel data."""
- data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=(LINE_CHUNK, 2048))
+ data = da.from_array(self._data["image_data"][:, :, _get_channel_index(key)], chunks=self._chunks)
if key['calibration'] != 'counts':
if key['name'] in ['1', '2', '3a']:
data = self.calibrate_solar_channel(data, key)
@@ -184,9 +189,9 @@ def _get_navigation_data(self, key):
"""Get navigation data."""
lons, lats = self.lons_lats
if key['name'] == 'latitude':
- data = da.from_array(lats, chunks=(LINE_CHUNK, 2048))
+ data = da.from_array(lats, chunks=self._chunks)
else:
- data = da.from_array(lons, chunks=(LINE_CHUNK, 2048))
+ data = da.from_array(lons, chunks=self._chunks)
return data
def _get_ch3_mask_or_true(self, key):
diff --git a/satpy/readers/hsaf_grib.py b/satpy/readers/hsaf_grib.py
index f51036a38f..a43927d5d4 100644
--- a/satpy/readers/hsaf_grib.py
+++ b/satpy/readers/hsaf_grib.py
@@ -23,15 +23,16 @@
"""
import logging
+from datetime import datetime, timedelta
+
+import dask.array as da
import numpy as np
+import pygrib
import xarray as xr
-import dask.array as da
from pyresample import geometry
-from datetime import datetime, timedelta
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
-import pygrib
LOG = logging.getLogger(__name__)
diff --git a/satpy/readers/hy2_scat_l2b_h5.py b/satpy/readers/hy2_scat_l2b_h5.py
index 217b6a0dd5..64520bae9a 100644
--- a/satpy/readers/hy2_scat_l2b_h5.py
+++ b/satpy/readers/hy2_scat_l2b_h5.py
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2020 Satpy developers
+# Copyright (c) 2020,2021 Satpy developers
#
# This file is part of satpy.
#
@@ -14,11 +14,17 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""HY-2B L2B Reader, distributed by Eumetsat in HDF5 format."""
+
+"""HY-2B L2B Reader.
+
+Distributed by EUMETSAT in HDF5 format.
+Also handles the HDF5 files from NSOAS, based on a file example.
+"""
+
+from datetime import datetime
import numpy as np
import xarray as xr
-from datetime import datetime
from satpy.readers.hdf5_utils import HDF5FileHandler
@@ -55,9 +61,12 @@ def get_variable_metadata(self):
"Orbit_Number": self['/attr/Orbit_Number'],
"Output_L2B_Filename": self['/attr/Output_L2B_Filename'],
"Production_Date_Time": self['/attr/Production_Date_Time'],
- "L2B_Expected_WVC_Rows": self['/attr/L2B_Expected_WVC_Rows'],
- "L2B_Number_WVC_cells": self['/attr/L2B_Number_WVC_cells'],
+ "L2B_Expected_WVC_Rows": self['/attr/L2B_Expected_WVC_Rows']
})
+ try:
+ info.update({"L2B_Number_WVC_cells": self['/attr/L2B_Number_WVC_cells']})
+ except KeyError:
+ info.update({"L2B_Expected_WVC_Cells": self['/attr/L2B_Expected_WVC_Cells']})
return info
def get_metadata(self):
@@ -91,16 +100,20 @@ def get_dataset(self, key, info):
if self[key['name']].ndim == 3:
dims = ['y', 'x', 'selection']
data = self[key['name']]
+ if "valid range" in data.attrs:
+ data.attrs.update({'valid_range': data.attrs.pop('valid range')})
if key['name'] in 'wvc_row_time':
data = data.rename({data.dims[0]: 'y'})
else:
dim_map = {curr_dim: new_dim for curr_dim, new_dim in zip(data.dims, dims)}
data = data.rename(dim_map)
- data = self._mask_data(key['name'], data)
- data = self._scale_data(key['name'], data)
+ data = self._mask_data(data)
+ data = self._scale_data(data)
if key['name'] in 'wvc_lon':
+ _attrs = data.attrs
data = xr.where(data > 180, data - 360., data)
+ data.attrs.update(_attrs)
data.attrs.update(info)
data.attrs.update(self.get_metadata())
data.attrs.update(self.get_variable_metadata())
@@ -109,13 +122,14 @@ def get_dataset(self, key, info):
return data
- def _scale_data(self, key_name, data):
- return data * self[key_name].attrs['scale_factor'] + self[key_name].attrs['add_offset']
-
- def _mask_data(self, key_name, data):
- data = xr.where(data == self[key_name].attrs['fill_value'], np.nan, data)
+ def _scale_data(self, data):
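+        """Scale the data with its scale_factor and add_offset attributes."""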
+ return data * data.attrs['scale_factor'] + data.attrs['add_offset']
- valid_range = self[key_name].attrs['valid range']
+ def _mask_data(self, data):
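+        """Mask fill values and values outside the valid range with NaN."""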
+ _attrs = data.attrs
+ valid_range = data.attrs['valid_range']
+ data = xr.where(data == data.attrs['fill_value'], np.nan, data)
data = xr.where(data < valid_range[0], np.nan, data)
data = xr.where(data > valid_range[1], np.nan, data)
+ data.attrs.update(_attrs)
return data
diff --git a/satpy/readers/iasi_l2.py b/satpy/readers/iasi_l2.py
index 37cc787e70..da734ae777 100644
--- a/satpy/readers/iasi_l2.py
+++ b/satpy/readers/iasi_l2.py
@@ -17,15 +17,16 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""IASI L2 HDF5 files."""
+import datetime as dt
+import logging
+
+import dask.array as da
import h5py
import numpy as np
import xarray as xr
-import dask.array as da
-import datetime as dt
-import logging
-from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
# Scan timing values taken from
# http://oiswww.eumetsat.org/WEBOPS/eps-pg/IASI-L1/IASIL1-PG-4ProdOverview.htm
diff --git a/satpy/readers/iasi_l2_so2_bufr.py b/satpy/readers/iasi_l2_so2_bufr.py
index 10847750e0..46adaa5192 100644
--- a/satpy/readers/iasi_l2_so2_bufr.py
+++ b/satpy/readers/iasi_l2_so2_bufr.py
@@ -86,9 +86,10 @@
import logging
from datetime import datetime
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
try:
import eccodes as ec
@@ -97,8 +98,8 @@
"""Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes.
Error: """, e)
-from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger('IASIL2SO2BUFR')
diff --git a/satpy/readers/ici_l1b_nc.py b/satpy/readers/ici_l1b_nc.py
new file mode 100644
index 0000000000..a4f15c3c35
--- /dev/null
+++ b/satpy/readers/ici_l1b_nc.py
@@ -0,0 +1,456 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2022 Satpy developers
+#
+# satpy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# satpy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with satpy. If not, see <http://www.gnu.org/licenses/>.
+"""EUMETSAT EPS-SG Ice Cloud Imager (ICI) Level 1B products reader.
+
+The format is explained in the
+`EPS-SG ICI Level 1B Product Format Specification V3A`_.
+
+This version is applicable to the ICI test data released in January 2021.
+
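+A minimal usage sketch (the reader name ``ici_l1b_nc`` matches this module; the
+filename and dataset name below are placeholders)::
+
+    from satpy import Scene
+
+    scn = Scene(filenames=["/path/to/ici_l1b_file.nc"], reader="ici_l1b_nc")
+    scn.load(["ici_radiance_183"])
+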
+.. _EPS-SG ICI Level 1B Product Format Specification V3A: https://www.eumetsat.int/media/47582
+
+"""
+
+import logging
+from datetime import datetime
+from enum import Enum
+from functools import cached_property
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+from geotiepoints.geointerpolator import GeoInterpolator
+
+from satpy.readers.netcdf_utils import NetCDF4FileHandler
+
+logger = logging.getLogger(__name__)
+
+
+# PLANCK COEFFICIENTS FOR CALIBRATION AS DEFINED BY EUMETSAT
+C1 = 1.191042e-5 # [mW/(sr·m2·cm-4)]
+C2 = 1.4387752 # [K·cm]
+# MEAN EARTH RADIUS AS DEFINED BY IUGG
+MEAN_EARTH_RADIUS = 6371008.7714 # [m]
+
+
+class InterpolationType(Enum):
+ """Enum for interpolation types."""
+
+ LONLAT = 0
+ SOLAR_ANGLES = 1
+ OBSERVATION_ANGLES = 2
+
+
+class IciL1bNCFileHandler(NetCDF4FileHandler):
+ """Reader class for ICI L1B products in netCDF format."""
+
+ def __init__(self, filename, filename_info, filetype_info, **kwargs):
+ """Read the calibration data and prepare the class for dataset reading.""" # noqa: E501
+ super().__init__(
+ filename, filename_info, filetype_info, auto_maskandscale=True,
+ )
+ # Read the variables which are required for the calibration
+ measurement = 'data/measurement_data'
+ self._bt_conversion_a = self[f'{measurement}/bt_conversion_a'].values
+ self._bt_conversion_b = self[f'{measurement}/bt_conversion_b'].values
+ self._channel_cw = self[f'{measurement}/centre_wavenumber'].values
+ self._n_samples = self[measurement].n_samples.size
+ self._filetype_info = filetype_info
+ self.orthorect = filetype_info.get('orthorect', True)
+
+ @property
+ def start_time(self):
+ """Get observation start time."""
+ try:
+ start_time = datetime.strptime(
+ self['/attr/sensing_start_time_utc'],
+ '%Y%m%d%H%M%S.%f',
+ )
+ except ValueError:
+ start_time = datetime.strptime(
+ self['/attr/sensing_start_time_utc'],
+ '%Y-%m-%d %H:%M:%S.%f',
+ )
+ return start_time
+
+ @property
+ def end_time(self):
+ """Get observation end time."""
+ try:
+ end_time = datetime.strptime(
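+        """Parse lines of EOS metadata into a nested dict, recursing into each GROUP/OBJECT element."""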
+ self['/attr/sensing_end_time_utc'],
+ '%Y%m%d%H%M%S.%f',
+ )
+ except ValueError:
+ end_time = datetime.strptime(
+ self['/attr/sensing_end_time_utc'],
+ '%Y-%m-%d %H:%M:%S.%f',
+ )
+ return end_time
+
+ @property
+ def platform_name(self):
+ """Return platform name."""
+ return self['/attr/spacecraft']
+
+ @property
+ def sensor(self):
+ """Return sensor."""
+ return self['/attr/instrument']
+
+ @property
+ def ssp_lon(self):
+ """Return subsatellite point longitude."""
+        # This parameter does not seem to be applicable to ICI
+ return None
+
+ @property
+ def observation_azimuth(self):
+ """Get observation azimuth angles."""
+ observation_azimuth, _ = self.observation_azimuth_and_zenith
+ return observation_azimuth
+
+ @property
+ def observation_zenith(self):
+ """Get observation zenith angles."""
+ _, observation_zenith = self.observation_azimuth_and_zenith
+ return observation_zenith
+
+ @property
+ def solar_azimuth(self):
+ """Get solar azimuth angles."""
+ solar_azimuth, _ = self.solar_azimuth_and_zenith
+ return solar_azimuth
+
+ @property
+ def solar_zenith(self):
+ """Get solar zenith angles."""
+ _, solar_zenith = self.solar_azimuth_and_zenith
+ return solar_zenith
+
+ @property
+ def longitude(self):
+ """Get longitude coordinates."""
+ longitude, _ = self.longitude_and_latitude
+ return longitude
+
+ @property
+ def latitude(self):
+ """Get latitude coordinates."""
+ _, latitude = self.longitude_and_latitude
+ return latitude
+
+ @cached_property
+ def observation_azimuth_and_zenith(self):
+ """Get observation azimuth and zenith angles."""
+ return self._interpolate(InterpolationType.OBSERVATION_ANGLES)
+
+ @cached_property
+ def solar_azimuth_and_zenith(self):
+ """Get solar azimuth and zenith angles."""
+ return self._interpolate(InterpolationType.SOLAR_ANGLES)
+
+ @cached_property
+ def longitude_and_latitude(self):
+ """Get longitude and latitude coordinates."""
+ return self._interpolate(InterpolationType.LONLAT)
+
+ @staticmethod
+ def _interpolate_geo(
+ longitude,
+ latitude,
+ n_samples,
+ ):
+ """
+ Perform the interpolation of geographic coordinates from tie points to pixel points.
+
+ Args:
+ longitude: xarray DataArray containing the longitude dataset to
+ interpolate.
+            latitude: xarray DataArray containing the latitude dataset to
+ interpolate.
+ n_samples: int describing number of samples per scan to interpolate
+ onto.
+
+ Returns:
+            tuple of arrays containing the interpolated values, all the original
+ metadata and the updated dimension names.
+
+ """
+ third_dim_name = longitude.dims[2]
+ horns = longitude[third_dim_name]
+ n_scan = longitude.n_scan
+ n_subs = longitude.n_subs
+ lons = da.zeros((n_scan.size, n_samples, horns.size))
+ lats = da.zeros((n_scan.size, n_samples, horns.size))
+ n_subs = np.linspace(0, n_samples - 1, n_subs.size).astype(int)
+ for horn in horns.values:
+ satint = GeoInterpolator(
+ (longitude.values[:, :, horn], latitude.values[:, :, horn]),
+ (n_scan.values, n_subs),
+ (n_scan.values, np.arange(n_samples)),
+ )
+ lons_horn, lats_horn = satint.interpolate()
+ lons[:, :, horn] = lons_horn
+ lats[:, :, horn] = lats_horn
+ dims = ['y', 'x', third_dim_name]
+ lon = xr.DataArray(
+ lons,
+ attrs=longitude.attrs,
+ dims=dims,
+ coords={third_dim_name: horns},
+ )
+ lat = xr.DataArray(
+ lats,
+ attrs=latitude.attrs,
+ dims=dims,
+ coords={third_dim_name: horns},
+ )
+ return lon, lat
+
+ def _interpolate_viewing_angle(
+ self,
+ azimuth,
+ zenith,
+ n_samples,
+ ):
+ """
+ Perform the interpolation of angular coordinates from tie points to pixel points.
+
+ Args:
+ azimuth: xarray DataArray containing the azimuth angle dataset to
+ interpolate.
+ zenith: xarray DataArray containing the zenith angle dataset to
+ interpolate.
+ n_samples: int describing number of samples per scan to interpolate
+ onto.
+
+ Returns:
+            tuple of arrays containing the interpolated values, all the original
+ metadata and the updated dimension names.
+
+ """
+ # interpolate onto spherical coords system with origin at equator
+ azimuth, zenith = self._interpolate_geo(azimuth, 90. - zenith, n_samples)
+ # transform back such that the origin is at the north pole
+ zenith = 90. - zenith
+ return azimuth, zenith
+
+ def _interpolate(
+ self,
+ interpolation_type,
+ ):
+ """Interpolate from tie points to pixel points."""
+ try:
+ if interpolation_type is InterpolationType.SOLAR_ANGLES:
+ var_key1 = self.filetype_info['solar_azimuth']
+ var_key2 = self.filetype_info['solar_zenith']
+ interp_method = self._interpolate_viewing_angle
+ elif interpolation_type is InterpolationType.OBSERVATION_ANGLES:
+ var_key1 = self.filetype_info['observation_azimuth']
+ var_key2 = self.filetype_info['observation_zenith']
+ interp_method = self._interpolate_viewing_angle
+ else:
+ var_key1 = self.filetype_info['longitude']
+ var_key2 = self.filetype_info['latitude']
+ interp_method = self._interpolate_geo
+ return interp_method(
+ self[var_key1],
+ self[var_key2],
+ self._n_samples,
+ )
+ except KeyError:
+ logger.warning(f'Datasets for {interpolation_type.name} interpolation not correctly defined in YAML file') # noqa: E501
+ return None, None
+
+ @staticmethod
+ def _calibrate_bt(radiance, cw, a, b):
+ """Perform the calibration to brightness temperature.
+
+ Args:
+ radiance: xarray DataArray or numpy ndarray containing the
+ radiance values.
+ cw: center wavenumber [cm-1].
+ a: temperature coefficient [-].
+ b: temperature coefficient [K].
+
+ Returns:
+ DataArray: array containing the calibrated brightness
+ temperature values.
+
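+        A rough, illustrative call (the numbers are placeholders, not real ICI
+        channel values)::
+
+            rad = np.array([10.0, 12.0])  # hypothetical radiances
+            bt = IciL1bNCFileHandler._calibrate_bt(rad, cw=11.0, a=1.0, b=0.0)
+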
+ """
+ return b + (a * C2 * cw / np.log(1 + C1 * cw ** 3 / radiance))
+
+ def _calibrate(self, variable, dataset_info):
+ """Perform the calibration.
+
+ Args:
+ variable: xarray DataArray containing the dataset to calibrate.
+ dataset_info: dictionary of information about the dataset.
+
+ Returns:
+ DataArray: array containing the calibrated values and all the
+ original metadata.
+
+ """
+ calibration_name = dataset_info['calibration']
+ if calibration_name == 'brightness_temperature':
+ chan_index = dataset_info['chan_index']
+ cw = self._channel_cw[chan_index]
+ a = self._bt_conversion_a[chan_index]
+ b = self._bt_conversion_b[chan_index]
+ calibrated_variable = self._calibrate_bt(variable, cw, a, b)
+ calibrated_variable.attrs = variable.attrs
+ elif calibration_name == 'radiance':
+ calibrated_variable = variable
+ else:
+ raise ValueError("Unknown calibration %s for dataset %s" % (calibration_name, dataset_info['name'])) # noqa: E501
+
+ return calibrated_variable
+
+ def _orthorectify(self, variable, orthorect_data_name):
+ """Perform the orthorectification.
+
+ Args:
+ variable: xarray DataArray containing the dataset to correct for
+ orthorectification.
+ orthorect_data_name: name of the orthorectification correction data
+ in the product.
+
+ Returns:
+ DataArray: array containing the corrected values and all the
+ original metadata.
+
+ """
+ try:
+ # Convert the orthorectification delta values from meters to
+ # degrees based on the simplified formula using mean Earth radius
+ orthorect_data = self[orthorect_data_name]
+ dim = self._get_third_dimension_name(orthorect_data)
+ orthorect_data = orthorect_data.sel({dim: variable[dim]})
+ variable += np.degrees(orthorect_data.values / MEAN_EARTH_RADIUS)
+ except KeyError:
+ logger.warning('Required dataset %s for orthorectification not available, skipping', orthorect_data_name) # noqa: E501
+ return variable
+
+ @staticmethod
+ def _standardize_dims(variable):
+ """Standardize dims to y, x."""
+ if 'n_scan' in variable.dims:
+ variable = variable.rename({'n_scan': 'y'})
+ if 'n_samples' in variable.dims:
+ variable = variable.rename({'n_samples': 'x'})
+ if variable.dims[0] == 'x':
+ variable = variable.transpose('y', 'x')
+ return variable
+
+ def _filter_variable(self, variable, dataset_info):
+ """Filter variable in the third dimension."""
+ dim = self._get_third_dimension_name(variable)
+ if dim is not None and dim in dataset_info:
+ variable = variable.sel({dim: dataset_info[dim]})
+ return variable
+
+ @staticmethod
+ def _drop_coords(variable):
+ """Drop coords that are not in dims."""
+ for coord in variable.coords:
+ if coord not in variable.dims:
+ variable = variable.drop_vars(coord)
+ return variable
+
+ @staticmethod
+ def _get_third_dimension_name(variable):
+ """Get name of the third dimension of the variable."""
+ dims = variable.dims
+ if len(dims) < 3:
+ return None
+ return dims[2]
+
+ def _fetch_variable(self, var_key):
+ """Fetch variable."""
+ if var_key in [
+ 'longitude',
+ 'latitude',
+ 'observation_zenith',
+ 'observation_azimuth',
+ 'solar_zenith',
+ 'solar_azimuth',
+ ] and getattr(self, var_key) is not None:
+ variable = getattr(self, var_key).copy()
+ else:
+ variable = self[var_key]
+ return variable
+
+ def get_dataset(self, dataset_id, dataset_info):
+ """Get dataset using file_key in dataset_info."""
+ var_key = dataset_info['file_key']
+ logger.debug(f'Reading in file to get dataset with key {var_key}.')
+ try:
+ variable = self._fetch_variable(var_key)
+ except KeyError:
+ logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501
+ return None
+ variable = self._filter_variable(variable, dataset_info)
+ if dataset_info.get('calibration') is not None:
+ variable = self._calibrate(variable, dataset_info)
+ if self.orthorect:
+ orthorect_data_name = dataset_info.get('orthorect_data', None)
+ if orthorect_data_name is not None:
+ variable = self._orthorectify(variable, orthorect_data_name)
+ variable = self._manage_attributes(variable, dataset_info)
+ variable = self._drop_coords(variable)
+ variable = self._standardize_dims(variable)
+ return variable
+
+ def _manage_attributes(self, variable, dataset_info):
+ """Manage attributes of the dataset."""
+ variable.attrs.setdefault('units', None)
+ variable.attrs.update(dataset_info)
+ variable.attrs.update(self._get_global_attributes())
+ return variable
+
+ def _get_global_attributes(self):
+ """Create a dictionary of global attributes."""
+ return {
+ 'filename': self.filename,
+ 'start_time': self.start_time,
+ 'end_time': self.end_time,
+ 'spacecraft_name': self.platform_name,
+ 'ssp_lon': self.ssp_lon,
+ 'sensor': self.sensor,
+ 'filename_start_time': self.filename_info['sensing_start_time'],
+ 'filename_end_time': self.filename_info['sensing_end_time'],
+ 'platform_name': self.platform_name,
+ 'quality_group': self._get_quality_attributes(),
+ }
+
+ def _get_quality_attributes(self):
+ """Get quality attributes."""
+ quality_group = self['quality']
+ quality_dict = {}
+ for key in quality_group:
+ # Add the values (as Numpy array) of each variable in the group
+ # where possible
+ try:
+ quality_dict[key] = quality_group[key].values
+ except ValueError:
+ quality_dict[key] = None
+ # Add the attributes of the quality group
+ quality_dict.update(quality_group.attrs)
+ return quality_dict
diff --git a/satpy/readers/li_l2.py b/satpy/readers/li_l2.py
index ae799f49bd..3fa8b0534b 100644
--- a/satpy/readers/li_l2.py
+++ b/satpy/readers/li_l2.py
@@ -15,6 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
+# type: ignore
"""Interface to MTG-LI L2 product NetCDF files
The reader is based on preliminary test data provided by EUMETSAT.
@@ -22,15 +23,17 @@
"LI L2 Product User Guide [LIL2PUG] Draft version" documentation.
"""
-import h5netcdf
import logging
-import numpy as np
from datetime import datetime
+
+import h5netcdf
+import numpy as np
from pyresample import geometry
-from satpy.readers.file_handlers import BaseFileHandler
+
# FIXME: This is not xarray/dask compatible
# TODO: Once migrated to xarray/dask, remove ignored path in setup.cfg
from satpy.dataset import Dataset
+from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
diff --git a/satpy/readers/maia.py b/satpy/readers/maia.py
index 5a45170d4f..7a271924c1 100644
--- a/satpy/readers/maia.py
+++ b/satpy/readers/maia.py
@@ -27,13 +27,13 @@
"""
import logging
+import dask.array as da
import h5py
import numpy as np
from xarray import DataArray
-import dask.array as da
-from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
diff --git a/satpy/readers/mersi2_l1b.py b/satpy/readers/mersi2_l1b.py
index 8ea589cfcc..eccfd0e78c 100644
--- a/satpy/readers/mersi2_l1b.py
+++ b/satpy/readers/mersi2_l1b.py
@@ -25,10 +25,12 @@
"""
from datetime import datetime
-from satpy.readers.hdf5_utils import HDF5FileHandler
-from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
-import numpy as np
+
import dask.array as da
+import numpy as np
+from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
+
+from satpy.readers.hdf5_utils import HDF5FileHandler
class MERSI2L1B(HDF5FileHandler):
@@ -94,23 +96,7 @@ def get_dataset(self, dataset_id, ds_info):
if 'rows_per_scan' in self.filetype_info:
attrs.setdefault('rows_per_scan', self.filetype_info['rows_per_scan'])
- fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range
- valid_range = attrs.pop('valid_range', None)
- if dataset_id.get('calibration') == 'counts':
- # preserve integer type of counts if possible
- attrs['_FillValue'] = fill_value
- new_fill = fill_value
- else:
- new_fill = np.nan
- if valid_range is not None:
- # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25)
- # in the HDF data, this is hardcoded here.
- if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095:
- valid_range[1] = 25000
- # typically bad_values == 65535, saturated == 65534
- # dead detector == 65533
- data = data.where((data >= valid_range[0]) &
- (data <= valid_range[1]), new_fill)
+ data = self._mask_data(data, dataset_id, attrs)
slope = attrs.pop('Slope', None)
intercept = attrs.pop('Intercept', None)
@@ -128,36 +114,12 @@ def get_dataset(self, dataset_id, ds_info):
ds_info['calibration_index'])
data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2
elif dataset_id.get('calibration') == "brightness_temperature":
- cal_index = ds_info['calibration_index']
- # Apparently we don't use these calibration factors for Rad -> BT
- # coeffs = self._get_coefficients(ds_info['calibration_key'], cal_index)
- # # coefficients are per-scan, we need to repeat the values for a
- # # clean alignment
- # coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1)
- # coeffs = coeffs.rename({
- # coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y'
- # }) # match data dims
- # data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3
-
+ calibration_index = ds_info['calibration_index']
# Converts um^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data)
# to SI units m^-1, mW*m^-3*str^-1.
wave_number = 1. / (dataset_id['wavelength'][1] / 1e6)
- # pass the dask array
- bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature
- if isinstance(bt_data, np.ndarray):
- # old versions of pyspectral produce numpy arrays
- data.data = da.from_array(bt_data, chunks=data.data.chunks)
- else:
- # new versions of pyspectral can do dask arrays
- data.data = bt_data
- # additional corrections from the file
- corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][cal_index])
- corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][cal_index])
- if corr_coeff_a != 0:
- data = (data - corr_coeff_b) / corr_coeff_a
- # Some BT bands seem to have 0 in the first 10 columns
- # and it is an invalid Kelvin measurement, so let's mask
- data = data.where(data != 0)
+
+ data = self._get_bt_dataset(data, calibration_index, wave_number)
data.attrs = attrs
# convert bytes to str
@@ -172,3 +134,57 @@ def get_dataset(self, dataset_id, ds_info):
})
return data
+
+ def _mask_data(self, data, dataset_id, attrs):
+ """Mask the data using fill_value and valid_range attributes."""
+ fill_value = attrs.pop('FillValue', np.nan) # covered by valid_range
+ valid_range = attrs.pop('valid_range', None)
+ if dataset_id.get('calibration') == 'counts':
+ # preserve integer type of counts if possible
+ attrs['_FillValue'] = fill_value
+ new_fill = fill_value
+ else:
+ new_fill = np.nan
+ if valid_range is not None:
+ # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25)
+ # in the HDF data, this is hardcoded here.
+ if dataset_id['name'] in ['24', '25'] and valid_range[1] == 4095:
+ valid_range[1] = 25000
+ # typically bad_values == 65535, saturated == 65534
+ # dead detector == 65533
+ data = data.where((data >= valid_range[0]) &
+ (data <= valid_range[1]), new_fill)
+ return data
+
+ def _get_bt_dataset(self, data, calibration_index, wave_number):
+ """Get the dataset as brightness temperature.
+
+ Apparently we don't use these calibration factors for Rad -> BT::
+
+ coeffs = self._get_coefficients(ds_info['calibration_key'], calibration_index)
+ # coefficients are per-scan, we need to repeat the values for a
+ # clean alignment
+ coeffs = np.repeat(coeffs, data.shape[0] // coeffs.shape[1], axis=1)
+ coeffs = coeffs.rename({
+ coeffs.dims[0]: 'coefficients', coeffs.dims[1]: 'y'
+ }) # match data dims
+ data = coeffs[0] + coeffs[1] * data + coeffs[2] * data**2 + coeffs[3] * data**3
+
+ """
+ # pass the dask array
+ bt_data = rad2temp(wave_number, data.data * 1e-5) # brightness temperature
+ if isinstance(bt_data, np.ndarray):
+ # old versions of pyspectral produce numpy arrays
+ data.data = da.from_array(bt_data, chunks=data.data.chunks)
+ else:
+ # new versions of pyspectral can do dask arrays
+ data.data = bt_data
+ # additional corrections from the file
+ corr_coeff_a = float(self['/attr/TBB_Trans_Coefficient_A'][calibration_index])
+ corr_coeff_b = float(self['/attr/TBB_Trans_Coefficient_B'][calibration_index])
+ if corr_coeff_a != 0:
+ data = (data - corr_coeff_b) / corr_coeff_a
+ # Some BT bands seem to have 0 in the first 10 columns
+ # and it is an invalid Kelvin measurement, so let's mask
+ data = data.where(data != 0)
+ return data
diff --git a/satpy/readers/mimic_TPW2_nc.py b/satpy/readers/mimic_TPW2_nc.py
index 027ff06a80..d4b7422ab1 100644
--- a/satpy/readers/mimic_TPW2_nc.py
+++ b/satpy/readers/mimic_TPW2_nc.py
@@ -32,11 +32,13 @@
http://tropic.ssec.wisc.edu/real-time/mtpw2/credits.html
"""
+import logging
+
import numpy as np
import xarray as xr
-from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4
from pyresample.geometry import AreaDefinition
-import logging
+
+from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4
logger = logging.getLogger(__name__)
diff --git a/satpy/readers/mirs.py b/satpy/readers/mirs.py
index eb8bee2448..3198d555a3 100644
--- a/satpy/readers/mirs.py
+++ b/satpy/readers/mirs.py
@@ -17,16 +17,18 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface to MiRS product."""
-import os
-import logging
import datetime
+import logging
+import os
+from collections import Counter
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
-from collections import Counter
+
from satpy import CHUNK_SIZE
-from satpy.readers.file_handlers import BaseFileHandler
from satpy.aux_download import retrieve
+from satpy.readers.file_handlers import BaseFileHandler
LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
@@ -35,7 +37,7 @@
# try getting setuptools/distribute's version of resource retrieval first
from pkg_resources import resource_string as get_resource_string
except ImportError:
- from pkgutil import get_data as get_resource_string
+ from pkgutil import get_data as get_resource_string # type: ignore
#
@@ -96,7 +98,7 @@ def read_atms_limb_correction_coefficients(fn):
n_chn = 22
n_fov = 96
# make the string a generator
- coeff_str = (line.strip() for line in coeff_str)
+ coeff_lines = (line.strip() for line in coeff_str)
all_coeffs = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32)
all_amean = np.zeros((n_chn, n_fov, n_chn), dtype=np.float32)
@@ -107,14 +109,17 @@ def read_atms_limb_correction_coefficients(fn):
# There should be 22 sections
for chan_idx in range(n_chn):
# blank line at the start of each section
- _ = next(coeff_str)
+ _ = next(coeff_lines)
# section header
- _nx, nchx, dmean = [x.strip() for x in next(coeff_str).split(" ") if x]
+ next_line = next(coeff_lines)
+
+ _nx, nchx, dmean = [x.strip() for x in next_line.split(" ") if x]
all_nchx[chan_idx] = nchx = int(nchx)
all_dmean[chan_idx] = float(dmean)
# coeff locations (indexes to put the future coefficients in)
- locations = [int(x.strip()) for x in next(coeff_str).split(" ") if x]
+ next_line = next(coeff_lines)
+ locations = [int(x.strip()) for x in next_line.split(" ") if x]
if len(locations) != nchx:
raise RuntimeError
for x in range(nchx):
@@ -123,7 +128,7 @@ def read_atms_limb_correction_coefficients(fn):
# Read 'nchx' coefficients for each of 96 FOV
for fov_idx in range(n_fov):
# chan_num, fov_num, *coefficients, error
- coeff_line_parts = [x.strip() for x in next(coeff_str).split(" ") if x][2:]
+ coeff_line_parts = [x.strip() for x in next(coeff_lines).split(" ") if x][2:]
coeffs = [float(x) for x in coeff_line_parts[:nchx]]
ameans = [float(x) for x in coeff_line_parts[nchx:-1]]
# not used but nice to know the purpose of the last column.
@@ -161,7 +166,7 @@ def get_coeff_by_sfc(coeff_fn, bt_data, idx):
c_size = bt_data[idx, :, :].chunks
correction = da.map_blocks(apply_atms_limb_correction,
bt_data, idx,
- *sfc_coeff, chunks=c_size)
+ *sfc_coeff, chunks=c_size, meta=np.array((), dtype=bt_data.dtype))
return correction
@@ -279,15 +284,13 @@ def force_date(self, key):
"""Force datetime.date for combine."""
if isinstance(self.filename_info[key], datetime.datetime):
return self.filename_info[key].date()
- else:
- return self.filename_info[key]
+ return self.filename_info[key]
def force_time(self, key):
"""Force datetime.time for combine."""
if isinstance(self.filename_info.get(key), datetime.datetime):
return self.filename_info.get(key).time()
- else:
- return self.filename_info.get(key)
+ return self.filename_info.get(key)
@property
def _get_coeff_filenames(self):
@@ -320,9 +323,9 @@ def _nan_for_dtype(data_arr_dtype):
# if we don't have to
if data_arr_dtype.type == np.float32:
return np.float32(np.nan)
- elif np.issubdtype(data_arr_dtype, np.timedelta64):
+ if np.issubdtype(data_arr_dtype, np.timedelta64):
return np.timedelta64('NaT')
- elif np.issubdtype(data_arr_dtype, np.datetime64):
+ if np.issubdtype(data_arr_dtype, np.datetime64):
return np.datetime64('NaT')
return np.nan
diff --git a/satpy/readers/modis_l1b.py b/satpy/readers/modis_l1b.py
index a4e888c612..51d6108f3e 100644
--- a/satpy/readers/modis_l1b.py
+++ b/satpy/readers/modis_l1b.py
@@ -25,11 +25,39 @@
a pattern similar to the following one:
.. parsed-literal::
+
M[O/Y]D02[1/H/Q]KM.A[date].[time].[collection].[processing_time].hdf
Other patterns where "collection" and/or "processing_time" are missing might also work
(see the readers yaml file for details). Geolocation files (MOD03) are also supported.
-
+The IMAPP direct broadcast naming format is also supported with names like:
+``a1.12226.1846.1000m.hdf``.
+
+Saturation Handling
+-------------------
+
+Band 2 of the MODIS sensor is available in 250m, 500m, and 1km resolutions.
+The band data may include a special fill value to indicate when the detector
+was saturated in the 250m version of the data. When the data is aggregated to
+coarser resolutions this saturation fill value is converted to a
+"can't aggregate" fill value. By default, Satpy will replace these fill values
+with NaN to indicate they are invalid. This is typically undesired when
+generating images for the data as they appear as "holes" in bright clouds.
+To control this the keyword argument ``mask_saturated`` can be passed and set
+to ``False`` to set these two fill values to the maximum valid value.
+
+.. code-block:: python
+
+ scene = satpy.Scene(filenames=filenames,
+ reader='modis_l1b',
+ reader_kwargs={'mask_saturated': False})
+ scene.load(['2'])
+
+Note that the saturation fill value can appear in other bands (e.g. bands
+7-19) in addition to band 2. Also, the "can't aggregate" fill value is a
+generic "catch all" for any problem encountered when aggregating high
+resolution bands to lower resolutions. Filling it with the maximum valid
+value could therefore replace non-saturated invalid pixels with valid values.
Geolocation files
-----------------
@@ -46,11 +74,11 @@
import logging
import numpy as np
-
import xarray as xr
+
from satpy import CHUNK_SIZE
-from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader
from satpy.readers.hdf4_utils import from_sds
+from satpy.readers.hdfeos_base import HDFEOSBaseFileReader, HDFEOSGeoReader
logger = logging.getLogger(__name__)
@@ -62,9 +90,20 @@ class HDFEOSBandReader(HDFEOSBaseFileReader):
"Q": 250,
"H": 500}
- def __init__(self, filename, filename_info, filetype_info):
+ res_to_possible_variable_names = {
+ 1000: ['EV_250_Aggr1km_RefSB',
+ 'EV_500_Aggr1km_RefSB',
+ 'EV_1KM_RefSB',
+ 'EV_1KM_Emissive'],
+ 500: ['EV_250_Aggr500_RefSB',
+ 'EV_500_RefSB'],
+ 250: ['EV_250_RefSB'],
+ }
+
+ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True, **kwargs):
"""Init the file handler."""
- HDFEOSBaseFileReader.__init__(self, filename, filename_info, filetype_info)
+ super().__init__(filename, filename_info, filetype_info, **kwargs)
+ self._mask_saturated = mask_saturated
ds = self.metadata['INVENTORYMETADATA'][
'COLLECTIONDESCRIPTIONCLASS']['SHORTNAME']['VALUE']
@@ -72,125 +111,145 @@ def __init__(self, filename, filename_info, filetype_info):
def get_dataset(self, key, info):
"""Read data from file and return the corresponding projectables."""
- datadict = {
- 1000: ['EV_250_Aggr1km_RefSB',
- 'EV_500_Aggr1km_RefSB',
- 'EV_1KM_RefSB',
- 'EV_1KM_Emissive'],
- 500: ['EV_250_Aggr500_RefSB',
- 'EV_500_RefSB'],
- 250: ['EV_250_RefSB']}
-
- platform_name = self.metadata['INVENTORYMETADATA']['ASSOCIATEDPLATFORMINSTRUMENTSENSOR'][
- 'ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER']['ASSOCIATEDPLATFORMSHORTNAME']['VALUE']
-
- info.update({'platform_name': 'EOS-' + platform_name})
- info.update({'sensor': 'modis'})
-
if self.resolution != key['resolution']:
return
-
- datasets = datadict[self.resolution]
- for dataset in datasets:
- subdata = self.sd.select(dataset)
+ var_name, band_index = self._get_band_variable_name_and_index(key["name"])
+ subdata = self.sd.select(var_name)
+ var_attrs = subdata.attributes()
+ uncertainty = self.sd.select(var_name + "_Uncert_Indexes")
+ array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[band_index, :, :],
+ dims=['y', 'x']).astype(np.float32)
+ valid_range = var_attrs['valid_range']
+ valid_min = np.float32(valid_range[0])
+ valid_max = np.float32(valid_range[1])
+ if not self._mask_saturated:
+ array = self._fill_saturated(array, valid_max)
+ array = self._mask_invalid(array, valid_min, valid_max)
+ array = self._mask_uncertain_pixels(array, uncertainty, band_index)
+ projectable = self._calibrate_data(key, info, array, var_attrs, band_index)
+
+ # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or
+ # (platform_name == 'Terra' and key['name'] in ["29"])):
+ # height, width = projectable.shape
+ # row_indices = projectable.mask.sum(1) == width
+ # if row_indices.sum() != height:
+ # projectable.mask[row_indices, :] = True
+
+ # Get the orbit number
+ # if not satscene.orbit:
+ # mda = self.data.attributes()["CoreMetadata.0"]
+ # orbit_idx = mda.index("ORBITNUMBER")
+ # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116]
+
+ # Trimming out dead sensor lines (detectors) on terra:
+    # (in addition, channels 27, 30, 34, 35, and 36 are noisy)
+ # if satscene.satname == "terra":
+ # for band in ["29"]:
+ # if not satscene[band].is_loaded() or satscene[band].data.mask.all():
+ # continue
+ # width = satscene[band].data.shape[1]
+ # height = satscene[band].data.shape[0]
+ # indices = satscene[band].data.mask.sum(1) < width
+ # if indices.sum() == height:
+ # continue
+ # satscene[band] = satscene[band].data[indices, :]
+ # satscene[band].area = geometry.SwathDefinition(
+ # lons=satscene[band].area.lons[indices, :],
+ # lats=satscene[band].area.lats[indices, :])
+ self._add_satpy_metadata(key, projectable)
+ return projectable
+
+ def _get_band_variable_name_and_index(self, band_name):
+ variable_names = self.res_to_possible_variable_names[self.resolution]
+ for variable_name in variable_names:
+ subdata = self.sd.select(variable_name)
var_attrs = subdata.attributes()
- band_names = var_attrs["band_names"].split(",")
-
- # get the relative indices of the desired channel
try:
- index = band_names.index(key['name'])
+ band_index = self._get_band_index(var_attrs, band_name)
except ValueError:
+ # can't find band in list of bands
continue
- uncertainty = self.sd.select(dataset + "_Uncert_Indexes")
- array = xr.DataArray(from_sds(subdata, chunks=CHUNK_SIZE)[index, :, :],
- dims=['y', 'x']).astype(np.float32)
- valid_range = var_attrs['valid_range']
-
- # Fill values:
- # Data Value Meaning
- # 65535 Fill Value (includes reflective band data at night mode
- # and completely missing L1A scans)
- # 65534 L1A DN is missing within a scan
- # 65533 Detector is saturated
- # 65532 Cannot compute zero point DN, e.g., SV is saturated
- # 65531 Detector is dead (see comments below)
- # 65530 RSB dn** below the minimum of the scaling range
- # 65529 TEB radiance or RSB dn** exceeds the maximum of the
- # scaling range
- # 65528 Aggregation algorithm failure
- # 65527 Rotation of Earth view Sector from nominal science
- # collection position
- # 65526 Calibration coefficient b1 could not be computed
- # 65525 Subframe is dead
- # 65524 Both sides of the PCLW electronics on simultaneously
- # 65501 - 65523 (reserved for future use)
- # 65500 NAD closed upper limit
-
- array = array.where(array >= np.float32(valid_range[0]))
- array = array.where(array <= np.float32(valid_range[1]))
- array = array.where(from_sds(uncertainty, chunks=CHUNK_SIZE)[index, :, :] < 15)
-
- if key['calibration'] == 'brightness_temperature':
- projectable = calibrate_bt(array, var_attrs, index, key['name'])
- info.setdefault('units', 'K')
- info.setdefault('standard_name', 'toa_brightness_temperature')
- elif key['calibration'] == 'reflectance':
- projectable = calibrate_refl(array, var_attrs, index)
- info.setdefault('units', '%')
- info.setdefault('standard_name',
- 'toa_bidirectional_reflectance')
- elif key['calibration'] == 'radiance':
- projectable = calibrate_radiance(array, var_attrs, index)
- info.setdefault('units', var_attrs.get('radiance_units'))
- info.setdefault('standard_name',
- 'toa_outgoing_radiance_per_unit_wavelength')
- elif key['calibration'] == 'counts':
- projectable = calibrate_counts(array, var_attrs, index)
- info.setdefault('units', 'counts')
- info.setdefault('standard_name', 'counts') # made up
- else:
- raise ValueError("Unknown calibration for "
- "key: {}".format(key))
- projectable.attrs = info
-
- # if ((platform_name == 'Aqua' and key['name'] in ["6", "27", "36"]) or
- # (platform_name == 'Terra' and key['name'] in ["29"])):
- # height, width = projectable.shape
- # row_indices = projectable.mask.sum(1) == width
- # if row_indices.sum() != height:
- # projectable.mask[row_indices, :] = True
-
- # Get the orbit number
- # if not satscene.orbit:
- # mda = self.data.attributes()["CoreMetadata.0"]
- # orbit_idx = mda.index("ORBITNUMBER")
- # satscene.orbit = mda[orbit_idx + 111:orbit_idx + 116]
-
- # Trimming out dead sensor lines (detectors) on terra:
- # (in addition channel 27, 30, 34, 35, and 36 are nosiy)
- # if satscene.satname == "terra":
- # for band in ["29"]:
- # if not satscene[band].is_loaded() or satscene[band].data.mask.all():
- # continue
- # width = satscene[band].data.shape[1]
- # height = satscene[band].data.shape[0]
- # indices = satscene[band].data.mask.sum(1) < width
- # if indices.sum() == height:
- # continue
- # satscene[band] = satscene[band].data[indices, :]
- # satscene[band].area = geometry.SwathDefinition(
- # lons=satscene[band].area.lons[indices, :],
- # lats=satscene[band].area.lats[indices, :])
- return projectable
+ return variable_name, band_index
+
+ def _get_band_index(self, var_attrs, band_name):
+ """Get the relative indices of the desired channel."""
+ band_names = var_attrs["band_names"].split(",")
+ index = band_names.index(band_name)
+ return index
+
+ def _fill_saturated(self, array, valid_max):
+ """Replace saturation-related values with max reflectance.
+
+ If the file handler was created with ``mask_saturated`` set to
+ ``True`` then all invalid/fill values are set to NaN. If ``False``
+ then the fill values 65528 and 65533 are set to the maximum valid
+ value. These values correspond to "can't aggregate" and "saturation".
+
+ Fill values:
+
+ * 65535 Fill Value (includes reflective band data at night mode
+ and completely missing L1A scans)
+ * 65534 L1A DN is missing within a scan
+ * 65533 Detector is saturated
+ * 65532 Cannot compute zero point DN, e.g., SV is saturated
+ * 65531 Detector is dead (see comments below)
+ * 65530 RSB dn** below the minimum of the scaling range
+        * 65529 TEB radiance or RSB dn** exceeds the maximum of the scaling range
+ * 65528 Aggregation algorithm failure
+ * 65527 Rotation of Earth view Sector from nominal science collection position
+ * 65526 Calibration coefficient b1 could not be computed
+ * 65525 Subframe is dead
+ * 65524 Both sides of the PCLW electronics on simultaneously
+ * 65501 - 65523 (reserved for future use)
+ * 65500 NAD closed upper limit
+
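+        For example, with ``valid_max = 32767`` a saturated pixel (65533) or
+        an aggregation-failure pixel (65528) becomes 32767, while the other
+        fill values listed above are still masked later by ``_mask_invalid``.
+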
+ """
+ return array.where((array != 65533) & (array != 65528), valid_max)
+
+ def _mask_invalid(self, array, valid_min, valid_max):
+ """Replace fill values with NaN."""
+ return array.where((array >= valid_min) & (array <= valid_max))
+
+ def _mask_uncertain_pixels(self, array, uncertainty, band_index):
+ if not self._mask_saturated:
+ return array
+ band_uncertainty = from_sds(uncertainty, chunks=CHUNK_SIZE)[band_index, :, :]
+ array = array.where(band_uncertainty < 15)
+ return array
+
+ def _calibrate_data(self, key, info, array, var_attrs, index):
+ if key['calibration'] == 'brightness_temperature':
+ projectable = calibrate_bt(array, var_attrs, index, key['name'])
+ info.setdefault('units', 'K')
+ info.setdefault('standard_name', 'toa_brightness_temperature')
+ elif key['calibration'] == 'reflectance':
+ projectable = calibrate_refl(array, var_attrs, index)
+ info.setdefault('units', '%')
+ info.setdefault('standard_name',
+ 'toa_bidirectional_reflectance')
+ elif key['calibration'] == 'radiance':
+ projectable = calibrate_radiance(array, var_attrs, index)
+ info.setdefault('units', var_attrs.get('radiance_units'))
+ info.setdefault('standard_name',
+ 'toa_outgoing_radiance_per_unit_wavelength')
+ elif key['calibration'] == 'counts':
+ projectable = calibrate_counts(array, var_attrs, index)
+ info.setdefault('units', 'counts')
+ info.setdefault('standard_name', 'counts') # made up
+ else:
+ raise ValueError("Unknown calibration for "
+ "key: {}".format(key))
+ projectable.attrs = info
+ return projectable
class MixedHDFEOSReader(HDFEOSGeoReader, HDFEOSBandReader):
"""A file handler for the files that have both regular bands and geographical information in them."""
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, **kwargs):
"""Init the file handler."""
- HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info)
- HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info)
+ HDFEOSGeoReader.__init__(self, filename, filename_info, filetype_info, **kwargs)
+ HDFEOSBandReader.__init__(self, filename, filename_info, filetype_info, **kwargs)
def get_dataset(self, key, info):
"""Get the dataset."""
diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py
index cf8c1914ce..108921e2b5 100644
--- a/satpy/readers/modis_l2.py
+++ b/satpy/readers/modis_l2.py
@@ -46,127 +46,193 @@
"""
import logging
+import dask.array as da
import numpy as np
import xarray as xr
-
from satpy import CHUNK_SIZE
-from satpy.readers.hdfeos_base import HDFEOSGeoReader
from satpy.readers.hdf4_utils import from_sds
+from satpy.readers.hdfeos_base import HDFEOSGeoReader
logger = logging.getLogger(__name__)
class ModisL2HDFFileHandler(HDFEOSGeoReader):
- """File handler for MODIS HDF-EOS Level 2 files."""
+ """File handler for MODIS HDF-EOS Level 2 files.
+
+    Includes error handling for files produced by IMAPP.
+
+ """
+
+ def _load_all_metadata_attributes(self):
+ try:
+ return super()._load_all_metadata_attributes()
+ except KeyError:
+ return {}
+
+ @property
+ def is_imapp_mask_byte1(self):
+ """Get if this file is the IMAPP 'mask_byte1' file type."""
+ return "mask_byte1" in self.filetype_info["file_type"]
+
+ @property
+ def start_time(self):
+ """Get the start time of the dataset."""
+ try:
+ return super().start_time
+ except KeyError:
+ try:
+ return self.filename_info["start_time"]
+ except KeyError:
+ return self.filename_info["acquisition_time"]
+
+ @property
+ def end_time(self):
+ """Get the end time of the dataset."""
+ try:
+ return super().end_time
+ except KeyError:
+ return self.start_time
+
+ @staticmethod
+ def read_geo_resolution(metadata):
+ """Parse metadata to find the geolocation resolution.
+
+        It is implemented as a staticmethod to match the read_mda pattern.
+
+ """
+ try:
+ return HDFEOSGeoReader.read_geo_resolution(metadata)
+ except RuntimeError:
+ # most L2 products are 5000m
+ return 5000
def _select_hdf_dataset(self, hdf_dataset_name, byte_dimension):
"""Load a dataset from HDF-EOS level 2 file."""
- hdf_dataset = self.sd.select(hdf_dataset_name)
+ dataset = self.sd.select(hdf_dataset_name)
+ dask_arr = from_sds(dataset, chunks=CHUNK_SIZE)
+ attrs = dataset.attributes()
+ dims = ['y', 'x']
if byte_dimension == 0:
- dataset = xr.DataArray(from_sds(hdf_dataset, chunks=CHUNK_SIZE),
- dims=['i', 'y', 'x']).astype(np.uint8)
+ dims = ['i', 'y', 'x']
+ dask_arr = dask_arr.astype(np.uint8)
elif byte_dimension == 2:
- dataset = xr.DataArray(from_sds(hdf_dataset, chunks=CHUNK_SIZE),
- dims=['y', 'x', 'i']).astype(np.uint8)
+ dims = ['y', 'x', 'i']
+ dask_arr = dask_arr.astype(np.uint8)
+ dataset = xr.DataArray(dask_arr, dims=dims, attrs=attrs)
+ if 'i' in dataset.dims:
# Reorder dimensions for consistency
dataset = dataset.transpose('i', 'y', 'x')
-
return dataset
- def _parse_resolution_info(self, info, resolution):
- if isinstance(info, list):
- if len(info) == 1 and isinstance(info[0], int):
- return info[0]
- # Check if the values are stored in a with resolution as a key
- if isinstance(info[0], dict):
- for elem in info:
- try:
- return elem[resolution]
- except KeyError:
- pass
- # The information doesn't concern the current resolution
- return None
- return info
-
def get_dataset(self, dataset_id, dataset_info):
"""Get DataArray for specified dataset."""
dataset_name = dataset_id['name']
- if dataset_name in HDFEOSGeoReader.DATASET_NAMES:
+ if self.is_geo_loadable_dataset(dataset_name):
return HDFEOSGeoReader.get_dataset(self, dataset_id, dataset_info)
dataset_name_in_file = dataset_info['file_key']
+ if self.is_imapp_mask_byte1:
+ dataset_name_in_file = dataset_info.get('imapp_file_key', dataset_name_in_file)
        # The requested dataset corresponds to a given set of bits of the HDF EOS dataset
if 'byte' in dataset_info and 'byte_dimension' in dataset_info:
- byte_dimension = dataset_info['byte_dimension'] # Where the information is stored
- dataset = self._select_hdf_dataset(dataset_name_in_file, byte_dimension)
-
- byte_information = self._parse_resolution_info(dataset_info['byte'], dataset_id['resolution'])
- # At which bit starts the information
- bit_start = self._parse_resolution_info(dataset_info['bit_start'], dataset_id['resolution'])
- # How many bits store the information
- bit_count = self._parse_resolution_info(dataset_info['bit_count'], dataset_id['resolution'])
-
- # Only one byte: select the byte information
- if isinstance(byte_information, int):
- byte_dataset = dataset[byte_information, :, :]
-
- # Two bytes: recombine the two bytes
- elif isinstance(byte_information, list) and len(byte_information) == 2:
- # We recombine the two bytes
- dataset_a = dataset[byte_information[0], :, :]
- dataset_b = dataset[byte_information[1], :, :]
- dataset_a = np.uint16(dataset_a)
- dataset_a = np.left_shift(dataset_a, 8) # dataset_a << 8
- byte_dataset = np.bitwise_or(dataset_a, dataset_b).astype(np.uint16)
- shape = byte_dataset.shape
- # We replicate the concatenated byte with the right shape
- byte_dataset = np.repeat(np.repeat(byte_dataset, 4, axis=0), 4, axis=1)
- # All bits carry information, we update bit_start consequently
- bit_start = np.arange(16, dtype=np.uint16).reshape((4, 4))
- bit_start = np.tile(bit_start, (shape[0], shape[1]))
-
- # Compute the final bit mask
- dataset = bits_strip(bit_start, bit_count, byte_dataset)
-
- # Apply quality assurance filter
- if 'quality_assurance' in dataset_info:
- quality_assurance_required = self._parse_resolution_info(
- dataset_info['quality_assurance'], dataset_id['resolution']
- )
- if quality_assurance_required is True:
- # Get quality assurance dataset recursively
- quality_assurance_dataset_id = dataset_id.from_dict(
- dict(name='quality_assurance', resolution=1000)
- )
- quality_assurance_dataset_info = {
- 'name': 'quality_assurance',
- 'resolution': [1000],
- 'byte_dimension': 2,
- 'byte': [0],
- 'bit_start': 0,
- 'bit_count': 1,
- 'file_key': 'Quality_Assurance'
- }
- quality_assurance = self.get_dataset(
- quality_assurance_dataset_id, quality_assurance_dataset_info
- )
- # Duplicate quality assurance dataset to create relevant filter
- duplication_factor = [int(dataset_dim / quality_assurance_dim)
- for dataset_dim, quality_assurance_dim
- in zip(dataset.shape, quality_assurance.shape)]
- quality_assurance = np.tile(quality_assurance, duplication_factor)
- # Replace unassured data by NaN value
- dataset[np.where(quality_assurance == 0)] = np.NaN
-
- # No byte manipulation required
+ dataset = self._extract_and_mask_category_dataset(dataset_id, dataset_info, dataset_name_in_file)
else:
- dataset = self.load_dataset(dataset_name_in_file)
+ # No byte manipulation required
+ dataset = self.load_dataset(dataset_name_in_file, dataset_info.pop("category", False))
+
+ self._add_satpy_metadata(dataset_id, dataset)
+ return dataset
+
+ def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name):
+        # determine which dimension holds the byte-packed information
+ byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info['byte_dimension']
+ dataset = self._select_hdf_dataset(var_name, byte_dimension)
+ # category products always have factor=1/offset=0 so don't apply them
+ # also remove them so they don't screw up future satpy processing
+ dataset.attrs.pop('scale_factor', None)
+ dataset.attrs.pop('add_offset', None)
+ # Don't do this byte work if we are using the IMAPP mask_byte1 file
+ if self.is_imapp_mask_byte1:
+ return dataset
+
+ dataset = _extract_byte_mask(dataset,
+ dataset_info['byte'],
+ dataset_info['bit_start'],
+ dataset_info['bit_count'])
+ dataset = self._mask_with_quality_assurance_if_needed(dataset, dataset_info, dataset_id)
+ return dataset
+ def _mask_with_quality_assurance_if_needed(self, dataset, dataset_info, dataset_id):
+ if not dataset_info.get('quality_assurance', False):
+ return dataset
+
+ # Get quality assurance dataset recursively
+ quality_assurance_dataset_id = dataset_id.from_dict(
+ dict(name='quality_assurance', resolution=1000)
+ )
+ quality_assurance_dataset_info = {
+ 'name': 'quality_assurance',
+ 'resolution': 1000,
+ 'byte_dimension': 2,
+ 'byte': 0,
+ 'bit_start': 0,
+ 'bit_count': 1,
+ 'file_key': 'Quality_Assurance'
+ }
+ quality_assurance = self.get_dataset(
+ quality_assurance_dataset_id, quality_assurance_dataset_info
+ )
+ # Duplicate quality assurance dataset to create relevant filter
+ duplication_factor = [int(dataset_dim / quality_assurance_dim)
+ for dataset_dim, quality_assurance_dim
+ in zip(dataset.shape, quality_assurance.shape)]
+ quality_assurance = np.tile(quality_assurance, duplication_factor)
+ # Replace unassured data by NaN value
+ dataset = dataset.where(quality_assurance != 0, dataset.attrs["_FillValue"])
return dataset
-def bits_strip(bit_start, bit_count, value):
+def _extract_byte_mask(dataset, byte_information, bit_start, bit_count):
+ attrs = dataset.attrs.copy()
+
+ if isinstance(byte_information, int):
+ # Only one byte: select the byte information
+ byte_dataset = dataset[byte_information, :, :]
+ dataset = _bits_strip(bit_start, bit_count, byte_dataset)
+ elif isinstance(byte_information, (list, tuple)) and len(byte_information) == 2:
+ # Two bytes: recombine the two bytes
+ byte_mask = da.map_blocks(
+ _extract_two_byte_mask,
+ dataset.data[byte_information[0]],
+ dataset.data[byte_information[1]],
+ bit_start=bit_start,
+ bit_count=bit_count,
+ dtype=np.uint16,
+ meta=np.array((), dtype=np.uint16),
+ chunks=tuple(tuple(chunk_size * 4 for chunk_size in dim_chunks) for dim_chunks in dataset.chunks[1:]),
+ )
+ dataset = xr.DataArray(byte_mask, dims=dataset.dims[1:])
+
+    # Reattach the original attributes to the extracted mask
+ dataset.attrs = attrs
+ return dataset
+
+
+def _extract_two_byte_mask(data_a: np.ndarray, data_b: np.ndarray, bit_start: int, bit_count: int) -> np.ndarray:
+ data_a = data_a.astype(np.uint16, copy=False)
+ data_a = np.left_shift(data_a, 8) # dataset_a << 8
+ byte_dataset = np.bitwise_or(data_a, data_b).astype(np.uint16)
+ shape = byte_dataset.shape
+ # We replicate the concatenated byte with the right shape
+ byte_dataset = np.repeat(np.repeat(byte_dataset, 4, axis=0), 4, axis=1)
+ # All bits carry information, we update bit_start consequently
+ bit_start = np.arange(16, dtype=np.uint16).reshape((4, 4))
+ bit_start = np.tile(bit_start, (shape[0], shape[1]))
+ return _bits_strip(bit_start, bit_count, byte_dataset)
+
+
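+# Worked example (illustrative): _bits_strip(bit_start=2, bit_count=3,
+# value=0b01101100) keeps bits 2-4, i.e. (0b01101100 >> 2) & 0b111 == 3.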
+def _bits_strip(bit_start, bit_count, value):
"""Extract specified bit from bit representation of integer value.
Parameters
diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index 8d9b6ca03f..b25e0e8019 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -15,19 +15,36 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""SAFE MSI L1C reader."""
+"""SAFE MSI L1C reader.
+
+The MSI data has a special value for saturated pixels. By default, these
+pixels are set to np.inf, but for some applications it might be desirable
+to have these pixels left untouched.
+For this case, the ``mask_saturated`` flag is available in the reader, and can be
+toggled with ``reader_kwargs`` upon Scene creation::
+
+ scene = satpy.Scene(filenames,
+ reader='msi_safe',
+ reader_kwargs={'mask_saturated': False})
+ scene.load(['B01'])
+
+The L1C format description for the files read here:
+
+ https://sentinels.copernicus.eu/documents/247904/0/Sentinel-2-product-specifications-document-V14-9.pdf/
+
+"""
import logging
+import xml.etree.ElementTree as ET
-import glymur
-import numpy as np
-from xarray import DataArray
import dask.array as da
-import xml.etree.ElementTree as ET
+import numpy as np
+import rioxarray
from pyresample import geometry
-from dask import delayed
+from xarray import DataArray
from satpy import CHUNK_SIZE
+from satpy._compat import cached_property
from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
@@ -42,14 +59,15 @@
class SAFEMSIL1C(BaseFileHandler):
"""File handler for SAFE MSI files (jp2)."""
- def __init__(self, filename, filename_info, filetype_info, mda):
- """Init the reader."""
+ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True):
+ """Initialize the reader."""
super(SAFEMSIL1C, self).__init__(filename, filename_info,
filetype_info)
-
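+        # ``mask_saturated`` is consumed by the XML metadata file handlers;
+        # the band file handler itself does not use it.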
+ del mask_saturated
self._start_time = filename_info['observation_time']
self._end_time = filename_info['observation_time']
self._channel = filename_info['band_name']
+ self._tile_mda = tile_mda
self._mda = mda
self.platform_name = PLATFORMS[filename_info['fmission_id']]
@@ -59,31 +77,20 @@ def get_dataset(self, key, info):
return
logger.debug('Reading %s.', key['name'])
- # FIXME: get this from MTD_MSIL1C.xml
- quantification_value = 10000.
- jp2 = glymur.Jp2k(self.filename)
- bitdepth = 0
- for seg in jp2.codestream.segment:
- try:
- bitdepth = max(bitdepth, seg.bitdepth[0])
- except AttributeError:
- pass
-
- jp2.dtype = (np.uint8 if bitdepth <= 8 else np.uint16)
-
- # Initialize the jp2 reader / doesn't work in a multi-threaded context.
- # jp2[0, 0]
- # data = da.from_array(jp2, chunks=CHUNK_SIZE) / quantification_value * 100
-
- data = da.from_delayed(delayed(jp2.read)(), jp2.shape, jp2.dtype)
- data = data.rechunk(CHUNK_SIZE) / quantification_value * 100
-
- proj = DataArray(data, dims=['y', 'x'])
+ proj = self._read_from_file(key)
proj.attrs = info.copy()
proj.attrs['units'] = '%'
proj.attrs['platform_name'] = self.platform_name
return proj
+ def _read_from_file(self, key):
+ proj = rioxarray.open_rasterio(self.filename, chunks=CHUNK_SIZE)
+ proj = proj.squeeze("band")
+ if key["calibration"] == "reflectance":
+ return self._mda.calibrate_to_reflectances(proj, self._channel)
+ if key["calibration"] == "radiance":
+ return self._mda.calibrate_to_radiances(proj, self._channel)
+
@property
def start_time(self):
"""Get the start time."""
@@ -98,77 +105,180 @@ def get_area_def(self, dsid):
"""Get the area def."""
if self._channel != dsid['name']:
return
- return self._mda.get_area_def(dsid)
+ return self._tile_mda.get_area_def(dsid)
-class SAFEMSIMDXML(BaseFileHandler):
- """File handle for sentinel 2 safe XML manifest."""
+class SAFEMSIXMLMetadata(BaseFileHandler):
+ """Base class for SAFE MSI XML metadata filehandlers."""
- def __init__(self, filename, filename_info, filetype_info):
+ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True):
"""Init the reader."""
- super(SAFEMSIMDXML, self).__init__(filename, filename_info,
- filetype_info)
+ super().__init__(filename, filename_info, filetype_info)
self._start_time = filename_info['observation_time']
self._end_time = filename_info['observation_time']
self.root = ET.parse(self.filename)
- self.tile = filename_info['gtile_number']
+ self.tile = filename_info['dtile_number']
self.platform_name = PLATFORMS[filename_info['fmission_id']]
+ self.mask_saturated = mask_saturated
+ import bottleneck # noqa
+ import geotiepoints # noqa
+
+ @property
+ def end_time(self):
+ """Get end time."""
+ return self._start_time
@property
def start_time(self):
"""Get start time."""
return self._start_time
+
+class SAFEMSIMDXML(SAFEMSIXMLMetadata):
+ """File handle for sentinel 2 safe XML generic metadata."""
+
+ def calibrate_to_reflectances(self, data, band_name):
+ """Calibrate *data* using the radiometric information for the metadata."""
+ quantification = int(self.root.find('.//QUANTIFICATION_VALUE').text)
+ data = self._sanitize_data(data)
+ return (data + self.band_offset(band_name)) / quantification * 100
+
+ def _sanitize_data(self, data):
+ data = data.where(data != self.no_data)
+ if self.mask_saturated:
+ data = data.where(data != self.saturated, np.inf)
+ return data
+
+ def band_offset(self, band):
+ """Get the band offset for *band*."""
+ band_index = self._band_index(band)
+ return self.band_offsets.get(band_index, 0)
+
+ def _band_index(self, band):
+ band_indices = self.band_indices
+ band_conversions = {"B01": "B1", "B02": "B2", "B03": "B3", "B04": "B4", "B05": "B5", "B06": "B6", "B07": "B7",
+ "B08": "B8", "B8A": "B8A", "B09": "B9", "B10": "B10", "B11": "B11", "B12": "B12"}
+ band_index = band_indices[band_conversions[band]]
+ return band_index
+
+ @cached_property
+ def band_indices(self):
+ """Get the band indices from the metadata."""
+ spectral_info = self.root.findall('.//Spectral_Information')
+ band_indices = {spec.attrib["physicalBand"]: int(spec.attrib["bandId"]) for spec in spectral_info}
+ return band_indices
+
+ @cached_property
+ def band_offsets(self):
+ """Get the band offsets from the metadata."""
+ offsets = self.root.find('.//Radiometric_Offset_List')
+ if offsets is not None:
+ band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets}
+ else:
+ band_offsets = {}
+ return band_offsets
+
+ @cached_property
+ def special_values(self):
+ """Get the special values from the metadata."""
+ special_values = self.root.findall('.//Special_Values')
+ special_values_dict = {value[0].text: float(value[1].text) for value in special_values}
+ return special_values_dict
+
@property
- def end_time(self):
- """Get end time."""
- return self._start_time
+ def no_data(self):
+ """Get the nodata value from the metadata."""
+ return self.special_values["NODATA"]
+
+ @property
+ def saturated(self):
+ """Get the saturated value from the metadata."""
+ return self.special_values["SATURATED"]
+
+ def calibrate_to_radiances(self, data, band_name):
+ """Calibrate *data* to radiance using the radiometric information for the metadata."""
+ physical_gain = self.physical_gain(band_name)
+ data = self._sanitize_data(data)
+ return (data + self.band_offset(band_name)) / physical_gain
+
+ def physical_gain(self, band_name):
+ """Get the physical gain for a given *band_name*."""
+ band_index = self._band_index(band_name)
+ return self.physical_gains[band_index]
+
+ @cached_property
+ def physical_gains(self):
+ """Get the physical gains dictionary."""
+ physical_gains = {int(elt.attrib["bandId"]): float(elt.text) for elt in self.root.findall(".//PHYSICAL_GAINS")}
+ return physical_gains
+
+
+def _fill_swath_edges(angles):
+ """Fill gaps at edges of swath."""
+ darr = DataArray(angles, dims=['y', 'x'])
+ darr = darr.bfill('x')
+ darr = darr.ffill('x')
+ darr = darr.bfill('y')
+ darr = darr.ffill('y')
+ angles = darr.data
+ return angles
+
+
+class SAFEMSITileMDXML(SAFEMSIXMLMetadata):
+ """File handle for sentinel 2 safe XML tile metadata."""
+
+ def __init__(self, filename, filename_info, filetype_info, mask_saturated=True):
+ """Init the reader."""
+ super().__init__(filename, filename_info, filetype_info, mask_saturated)
+ self.geocoding = self.root.find('.//Tile_Geocoding')
def get_area_def(self, dsid):
"""Get the area definition of the dataset."""
- try:
- from pyproj import CRS
- except ImportError:
- CRS = None
- geocoding = self.root.find('.//Tile_Geocoding')
- epsg = geocoding.find('HORIZONTAL_CS_CODE').text
- rows = int(geocoding.find('Size[@resolution="' + str(dsid['resolution']) + '"]/NROWS').text)
- cols = int(geocoding.find('Size[@resolution="' + str(dsid['resolution']) + '"]/NCOLS').text)
- geoposition = geocoding.find('Geoposition[@resolution="' + str(dsid['resolution']) + '"]')
+ area_extent = self._area_extent(dsid['resolution'])
+ cols, rows = self._shape(dsid['resolution'])
+ area = geometry.AreaDefinition(
+ self.tile,
+ "On-the-fly area",
+ self.tile,
+ self.projection,
+ cols,
+ rows,
+ area_extent)
+ return area
+
+ @cached_property
+ def projection(self):
+ """Get the geographic projection."""
+ from pyproj import CRS
+ epsg = self.geocoding.find('HORIZONTAL_CS_CODE').text
+ return CRS(epsg)
+
+ def _area_extent(self, resolution):
+ cols, rows = self._shape(resolution)
+ geoposition = self.geocoding.find(f'Geoposition[@resolution="{resolution}"]')
ulx = float(geoposition.find('ULX').text)
uly = float(geoposition.find('ULY').text)
xdim = float(geoposition.find('XDIM').text)
ydim = float(geoposition.find('YDIM').text)
area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly)
- if CRS is not None:
- proj = CRS(epsg)
- else:
- proj = {'init': epsg}
- area = geometry.AreaDefinition(
- self.tile,
- "On-the-fly area",
- self.tile,
- proj,
- cols,
- rows,
- area_extent)
- return area
+ return area_extent
+
+ def _shape(self, resolution):
+ rows = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NROWS').text)
+ cols = int(self.geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text)
+ return cols, rows
@staticmethod
def _do_interp(minterp, xcoord, ycoord):
- interp_points2 = np.vstack((xcoord.ravel(), ycoord.ravel()))
+ interp_points2 = np.vstack((ycoord.ravel(), xcoord.ravel()))
res = minterp(interp_points2)
return res.reshape(xcoord.shape)
def interpolate_angles(self, angles, resolution):
"""Interpolate the angles."""
- # FIXME: interpolate in cartesian coordinates if the lons or lats are
- # problematic
from geotiepoints.multilinear import MultilinearInterpolator
- geocoding = self.root.find('.//Tile_Geocoding')
- rows = int(geocoding.find('Size[@resolution="' + str(resolution) + '"]/NROWS').text)
- cols = int(geocoding.find('Size[@resolution="' + str(resolution) + '"]/NCOLS').text)
+ cols, rows = self._shape(resolution)
smin = [0, 0]
smax = np.array(angles.shape) - 1
@@ -176,8 +286,8 @@ def interpolate_angles(self, angles, resolution):
minterp = MultilinearInterpolator(smin, smax, orders)
minterp.set_values(da.atleast_2d(angles.ravel()))
- x = da.arange(rows, dtype=angles.dtype, chunks=CHUNK_SIZE) / (rows-1) * (angles.shape[0] - 1)
- y = da.arange(cols, dtype=angles.dtype, chunks=CHUNK_SIZE) / (cols-1) * (angles.shape[1] - 1)
+ y = da.arange(rows, dtype=angles.dtype, chunks=CHUNK_SIZE) / (rows-1) * (angles.shape[0] - 1)
+ x = da.arange(cols, dtype=angles.dtype, chunks=CHUNK_SIZE) / (cols-1) * (angles.shape[1] - 1)
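+        # rows correspond to the y axis and columns to the x axis; _do_interp
+        # stacks the interpolation points in (y, x) order to match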
xcoord, ycoord = da.meshgrid(x, y)
return da.map_blocks(self._do_interp, minterp, xcoord, ycoord, dtype=angles.dtype, chunks=xcoord.chunks)
@@ -185,19 +295,30 @@ def _get_coarse_dataset(self, key, info):
"""Get the coarse dataset refered to by `key` from the XML data."""
angles = self.root.find('.//Tile_Angles')
if key['name'] in ['solar_zenith_angle', 'solar_azimuth_angle']:
- elts = angles.findall(info['xml_tag'] + '/Values_List/VALUES')
- return np.array([[val for val in elt.text.split()] for elt in elts],
- dtype=np.float64)
-
+ angles = self._get_solar_angles(angles, info)
elif key['name'] in ['satellite_zenith_angle', 'satellite_azimuth_angle']:
- arrays = []
- elts = angles.findall(info['xml_tag'] + '[@bandId="1"]')
- for elt in elts:
- items = elt.findall(info['xml_item'] + '/Values_List/VALUES')
- arrays.append(np.array([[val for val in item.text.split()] for item in items],
- dtype=np.float64))
- return np.nanmean(np.dstack(arrays), -1)
- return None
+ angles = self._get_satellite_angles(angles, info)
+ else:
+ angles = None
+ return angles
+
+ def _get_solar_angles(self, angles, info):
+ angles = self._get_values_from_tag(angles, info['xml_tag'])
+ return angles
+
+ @staticmethod
+ def _get_values_from_tag(xml_tree, xml_tag):
+ elts = xml_tree.findall(xml_tag + '/Values_List/VALUES')
+ return np.array([[val for val in elt.text.split()] for elt in elts],
+ dtype=np.float64)
+
+ def _get_satellite_angles(self, angles, info):
+ arrays = []
+ elts = angles.findall(info['xml_tag'] + '[@bandId="1"]')
+ for elt in elts:
+ arrays.append(self._get_values_from_tag(elt, info['xml_item']))
+ angles = np.nanmean(np.dstack(arrays), -1)
+ return angles
def get_dataset(self, key, info):
"""Get the dataset referred to by `key`."""
@@ -205,11 +326,7 @@ def get_dataset(self, key, info):
if angles is None:
return None
- # Fill gaps at edges of swath
- darr = DataArray(angles, dims=['y', 'x'])
- darr = darr.bfill('x')
- darr = darr.ffill('x')
- angles = darr.data
+ angles = _fill_swath_edges(angles)
res = self.interpolate_angles(angles, key['resolution'])
diff --git a/satpy/readers/msu_gsa_l1b.py b/satpy/readers/msu_gsa_l1b.py
new file mode 100644
index 0000000000..df06239b43
--- /dev/null
+++ b/satpy/readers/msu_gsa_l1b.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Reader for the Arctica-M1 MSU-GS/A data.
+
+The files for this reader are HDF5 and contain channel data at 1km resolution
+for the VIS channels and 4km resolution for the IR channels. Geolocation data
+is available at both resolutions, as is sun and satellite geometry.
+
+This reader was tested on sample data provided by EUMETSAT.
+
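+A minimal usage sketch, assuming the reader is registered in Satpy as
+``msu_gsa_l1b`` (the dataset names to load are defined in the accompanying
+YAML file)::
+
+    from satpy import Scene
+
+    scn = Scene(filenames=filenames, reader='msu_gsa_l1b')
+    scn.load(scn.available_dataset_names())
+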
+"""
+from datetime import datetime
+
+import numpy as np
+
+from satpy.readers.hdf5_utils import HDF5FileHandler
+
+
+class MSUGSAFileHandler(HDF5FileHandler):
+ """MSU-GS/A L1B file reader."""
+
+ @property
+ def start_time(self):
+ """Time for timeslot scan start."""
+ dtstr = self['/attr/timestamp_without_timezone']
+ return datetime.strptime(dtstr, "%Y-%m-%dT%H:%M:%S")
+
+ @property
+ def satellite_altitude(self):
+ """Satellite altitude at time of scan.
+
+ There is no documentation but this appears to be
+ height above surface in meters.
+ """
+ return float(self['/attr/satellite_observation_point_height'])
+
+ @property
+ def satellite_latitude(self):
+ """Satellite latitude at time of scan."""
+ return float(self['/attr/satellite_observation_point_latitude'])
+
+ @property
+ def satellite_longitude(self):
+ """Satellite longitude at time of scan."""
+ return float(self['/attr/satellite_observation_point_longitude'])
+
+ @property
+ def sensor_name(self):
+ """Sensor name is hardcoded."""
+ sensor = 'msu_gsa'
+ return sensor
+
+ @property
+ def platform_name(self):
+ """Platform name is also hardcoded."""
+ platform = 'Arctica-M-N1'
+ return platform
+
+ @staticmethod
+ def _apply_scale_offset(in_data):
+ """Apply the scale and offset to data."""
+ scl = in_data.attrs['scale']
+ off = in_data.attrs['offset']
+ return in_data * scl + off
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Load data variable and metadata and calibrate if needed."""
+ file_key = ds_info.get('file_key', dataset_id['name'])
+ data = self[file_key]
+ attrs = data.attrs.copy() # avoid contaminating other band loading
+ attrs.update(ds_info)
+
+        # Mask out pixels equal to the fill value
+ fill_val = attrs.pop('fill_value')
+ data = data.where(data != fill_val, np.nan)
+
+ # Data has a scale and offset that we must apply
+ data = self._apply_scale_offset(data)
+
+        # Data is given as radiance; convert to reflectance if requested
+ if dataset_id.get('calibration') == "reflectance":
+ solconst = float(attrs.pop('F_solar_constant'))
+ data = np.pi * data / solconst
+ # Satpy expects reflectance values in 0-100 range
+ data = data * 100.
+
+ data.attrs = attrs
+ data.attrs.update({
+ 'platform_name': self.platform_name,
+ 'sensor': self.sensor_name,
+ 'sat_altitude': self.satellite_altitude,
+ 'sat_latitude': self.satellite_latitude,
+ 'sat_longitude': self.satellite_longitude,
+ })
+
+ return data
diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py
index d9a77aac0b..de385f67fc 100644
--- a/satpy/readers/mviri_l1b_fiduceo_nc.py
+++ b/satpy/readers/mviri_l1b_fiduceo_nc.py
@@ -148,14 +148,12 @@
- `[Handbook]`_ MFG User Handbook
- `[PUG]`_ FIDUCEO MVIRI FCDR Product User Guide
-.. _[Handbook]: http://www.eumetsat.int/\
-website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_TD06_MARF&\
-RevisionSelectionMethod=LatestReleased&Rendition=Web
+.. _[Handbook]: https://www.eumetsat.int/media/7323
.. _[PUG]: http://doi.org/10.15770/EUM_SEC_CLM_0009
"""
import abc
-from functools import lru_cache
+import functools
import warnings
import dask.array as da
@@ -163,11 +161,7 @@
import xarray as xr
from satpy import CHUNK_SIZE
-from satpy.readers._geos_area import (
- sampling_to_lfac_cfac,
- get_area_definition,
- get_area_extent
-)
+from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac
from satpy.readers.file_handlers import BaseFileHandler
EQUATOR_RADIUS = 6378140.0
@@ -571,6 +565,13 @@ def __init__(self, filename, filename_info, filetype_info,
self.projection_longitude = float(filename_info['projection_longitude'])
self.calib_coefs = self._get_calib_coefs()
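+        # Wrap the cached methods per instance instead of decorating them at
+        # class level, so each file handler gets its own cache and ``self``
+        # is not kept alive in a shared ``lru_cache``.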
+ self._get_angles = functools.lru_cache(maxsize=8)(
+ self._get_angles_uncached
+ )
+ self._get_acq_time = functools.lru_cache(maxsize=3)(
+ self._get_acq_time_uncached
+ )
+
def get_dataset(self, dataset_id, dataset_info):
"""Get the dataset."""
name = dataset_id['name']
@@ -608,11 +609,10 @@ def _get_channel(self, name, resolution, calibration):
ds = qc.mask(ds)
else:
qc.check()
- ds['acq_time'] = ('y', self._get_acq_time(resolution))
+ ds['acq_time'] = self._get_acq_time(resolution)
return ds
- @lru_cache(maxsize=8) # 4 angle datasets with two resolutions each
- def _get_angles(self, name, resolution):
+ def _get_angles_uncached(self, name, resolution):
"""Get angle dataset.
Files provide angles (solar/satellite zenith & azimuth) at a coarser
@@ -695,8 +695,7 @@ def _get_calib_coefs(self):
return coefs
- @lru_cache(maxsize=3) # Three channels
- def _get_acq_time(self, resolution):
+ def _get_acq_time_uncached(self, resolution):
"""Get scanline acquisition time for the given resolution.
Note that the acquisition time does not increase monotonically
diff --git a/satpy/readers/mws_l1b.py b/satpy/readers/mws_l1b.py
new file mode 100644
index 0000000000..b86fbebf00
--- /dev/null
+++ b/satpy/readers/mws_l1b.py
@@ -0,0 +1,289 @@
+# Copyright (c) 2022 Pytroll Developers
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Reader for the EPS-SG Microwave Sounder (MWS) level-1b data.
+
+Documentation: https://www.eumetsat.int/media/44139
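+
+A minimal usage sketch, using the reader name referenced by the file handler
+below (MWS channels are named ``'1'`` through ``'24'``)::
+
+    from satpy import Scene
+
+    scn = Scene(filenames=filenames, reader='mws_l1b_nc')
+    scn.load(['1'])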
+"""
+
+import logging
+from datetime import datetime
+
+import dask.array as da
+import numpy as np
+from netCDF4 import default_fillvals
+
+from .netcdf_utils import NetCDF4FileHandler
+
+logger = logging.getLogger(__name__)
+
+
+# Dict containing all available auxiliary data parameters to be read using
+# the index map. Keys are the parameter names and values are the paths to
+# the variables inside the NetCDF file.
+
+AUX_DATA = {
+ 'scantime_utc': 'data/navigation/mws_scantime_utc',
+ 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle',
+ 'solar_zenith': 'data/navigation/mws_solar_zenith_angle',
+ 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle',
+ 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle',
+ 'surface_type': 'data/navigation/mws_surface_type',
+ 'terrain_elevation': 'data/navigation/mws_terrain_elevation',
+ 'mws_lat': 'data/navigation/mws_lat',
+ 'mws_lon': 'data/navigation/mws_lon',
+}
+
+MWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4,
+ '5': 5, '6': 6, '7': 7, '8': 8,
+ '9': 9, '10': 10, '11': 11, '12': 12,
+ '13': 13, '14': 14, '15': 15, '16': 16,
+ '17': 17, '18': 18, '19': 19, '20': 20,
+ '21': 21, '22': 22, '23': 23, '24': 24}
+
+MWS_CHANNEL_NAMES = list(MWS_CHANNEL_NAMES_TO_NUMBER.keys())
+MWS_CHANNELS = set(MWS_CHANNEL_NAMES)
+
+
+def get_channel_index_from_name(chname):
+ """Get the MWS channel index from the channel name."""
+ chindex = MWS_CHANNEL_NAMES_TO_NUMBER.get(chname, 0) - 1
+ if 0 <= chindex < 24:
+ return chindex
+ raise AttributeError(f"Channel name '{chname}' not supported")
+
+
+def _get_aux_data_name_from_dsname(dsname):
+ aux_data_name = [key for key in AUX_DATA.keys() if key in dsname]
+ if len(aux_data_name) > 0:
+ return aux_data_name[0]
+
+
+class MWSL1BFile(NetCDF4FileHandler):
+ """Class implementing the EPS-SG-A1 MWS L1b Filehandler.
+
+ This class implements the European Polar System Second Generation (EPS-SG)
+ Microwave Sounder (MWS) Level-1b NetCDF reader. It is designed to be used
+    through the :class:`~satpy.Scene` class using the :meth:`~satpy.Scene.load`
+ method with the reader ``"mws_l1b_nc"``.
+
+ """
+
+ _platform_name_translate = {
+ "SGA1": "Metop-SG-A1",
+ "SGA2": "Metop-SG-A2",
+ "SGA3": "Metop-SG-A3"}
+
+ def __init__(self, filename, filename_info, filetype_info):
+ """Initialize file handler."""
+ super().__init__(filename, filename_info,
+ filetype_info,
+ cache_var_size=10000,
+ cache_handle=True)
+ logger.debug('Reading: {}'.format(self.filename))
+ logger.debug('Start: {}'.format(self.start_time))
+ logger.debug('End: {}'.format(self.end_time))
+
+ self._cache = {}
+
+ self._channel_names = MWS_CHANNEL_NAMES
+
+ @property
+ def start_time(self):
+ """Get start time."""
+ return datetime.strptime(self['/attr/sensing_start_time_utc'],
+ '%Y-%m-%d %H:%M:%S.%f')
+
+ @property
+ def end_time(self):
+ """Get end time."""
+ return datetime.strptime(self['/attr/sensing_end_time_utc'],
+ '%Y-%m-%d %H:%M:%S.%f')
+
+ @property
+ def sensor(self):
+ """Get the sensor name."""
+ return self['/attr/instrument']
+
+ @property
+ def platform_name(self):
+ """Get the platform name."""
+ return self._platform_name_translate.get(self['/attr/spacecraft'])
+
+ @property
+ def sub_satellite_longitude_start(self):
+ """Get the longitude of sub-satellite point at start of the product."""
+ return self['status/satellite/subsat_longitude_start'].data.item()
+
+ @property
+ def sub_satellite_latitude_start(self):
+ """Get the latitude of sub-satellite point at start of the product."""
+ return self['status/satellite/subsat_latitude_start'].data.item()
+
+ @property
+ def sub_satellite_longitude_end(self):
+ """Get the longitude of sub-satellite point at end of the product."""
+ return self['status/satellite/subsat_longitude_end'].data.item()
+
+ @property
+ def sub_satellite_latitude_end(self):
+ """Get the latitude of sub-satellite point at end of the product."""
+ return self['status/satellite/subsat_latitude_end'].data.item()
+
+ def get_dataset(self, dataset_id, dataset_info):
+ """Get dataset using file_key in dataset_info."""
+ logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename))
+
+ var_key = dataset_info['file_key']
+ if _get_aux_data_name_from_dsname(dataset_id['name']) is not None:
+ variable = self._get_dataset_aux_data(dataset_id['name'])
+ elif any(lb in dataset_id['name'] for lb in MWS_CHANNELS):
+ logger.debug(f'Reading in file to get dataset with key {var_key}.')
+ variable = self._get_dataset_channel(dataset_id, dataset_info)
+ else:
+ logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501
+ return None
+
+ variable = self._manage_attributes(variable, dataset_info)
+ variable = self._drop_coords(variable)
+ variable = self._standardize_dims(variable)
+ return variable
+
+ @staticmethod
+ def _standardize_dims(variable):
+ """Standardize dims to y, x."""
+ if 'n_scans' in variable.dims:
+ variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'})
+ if variable.dims[0] == 'x':
+ variable = variable.transpose('y', 'x')
+ return variable
+
+ @staticmethod
+ def _drop_coords(variable):
+ """Drop coords that are not in dims."""
+ for coord in variable.coords:
+ if coord not in variable.dims:
+ variable = variable.drop_vars(coord)
+ return variable
+
+ def _manage_attributes(self, variable, dataset_info):
+ """Manage attributes of the dataset."""
+ variable.attrs.setdefault('units', None)
+ variable.attrs.update(dataset_info)
+ variable.attrs.update(self._get_global_attributes())
+ return variable
+
+ def _get_dataset_channel(self, key, dataset_info):
+ """Load dataset corresponding to channel measurement.
+
+ Load a dataset when the key refers to a measurand, whether uncalibrated
+ (counts) or calibrated in terms of brightness temperature or radiance.
+
+ """
+        # Get the dataset and the metadata for the requested channel
+ grp_pth = dataset_info['file_key']
+ channel_index = get_channel_index_from_name(key['name'])
+
+ data = self[grp_pth][:, :, channel_index]
+ attrs = data.attrs.copy()
+
+ fv = attrs.pop(
+ "FillValue",
+ default_fillvals.get(data.dtype.str[1:], np.nan))
+ vr = attrs.get("valid_range", [-np.inf, np.inf])
+
+ if key['calibration'] == "counts":
+ attrs["_FillValue"] = fv
+ nfv = fv
+ else:
+ nfv = np.nan
+ data = data.where(data >= vr[0], nfv)
+ data = data.where(data <= vr[1], nfv)
+
+ # Manage the attributes of the dataset
+ data.attrs.setdefault('units', None)
+ data.attrs.update(dataset_info)
+
+ dataset_attrs = getattr(data, 'attrs', {})
+ dataset_attrs.update(dataset_info)
+ dataset_attrs.update({
+ "platform_name": self.platform_name,
+ "sensor": self.sensor,
+ "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start,
+ 'sub_satellite_longitude_start': self.sub_satellite_longitude_start,
+ 'sub_satellite_latitude_end': self.sub_satellite_latitude_end,
+ 'sub_satellite_longitude_end': self.sub_satellite_longitude_end},
+ })
+
+ try:
+ dataset_attrs.update(key.to_dict())
+ except AttributeError:
+ dataset_attrs.update(key)
+
+ data.attrs.update(dataset_attrs)
+ return data
+
+ def _get_dataset_aux_data(self, dsname):
+ """Get the auxiliary data arrays using the index map."""
+ # Geolocation and navigation data:
+ if dsname in ['mws_lat', 'mws_lon',
+ 'solar_azimuth', 'solar_zenith',
+ 'satellite_azimuth', 'satellite_zenith',
+ 'surface_type', 'terrain_elevation']:
+ var_key = AUX_DATA.get(dsname)
+ else:
+ raise NotImplementedError(f"Dataset '{dsname}' not supported!")
+
+ try:
+ variable = self[var_key]
+ except KeyError:
+ logger.exception("Could not find key %s in NetCDF file, no valid Dataset created", var_key)
+ raise
+
+ # Scale the data:
+ if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs:
+ missing_value = variable.attrs['missing_value']
+ variable.data = da.where(variable.data == missing_value, np.nan,
+ variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset'])
+
+ return variable
+
+ def _get_global_attributes(self):
+ """Create a dictionary of global attributes."""
+ return {
+ 'filename': self.filename,
+ 'start_time': self.start_time,
+ 'end_time': self.end_time,
+ 'spacecraft_name': self.platform_name,
+ 'sensor': self.sensor,
+ 'filename_start_time': self.filename_info['start_time'],
+ 'filename_end_time': self.filename_info['end_time'],
+ 'platform_name': self.platform_name,
+ 'quality_group': self._get_quality_attributes(),
+ }
+
+ def _get_quality_attributes(self):
+ """Get quality attributes."""
+ quality_group = self['quality']
+ quality_dict = {}
+ for key in quality_group:
+ # Add the values (as Numpy array) of each variable in the group
+ # where possible
+ try:
+ quality_dict[key] = quality_group[key].values
+ except ValueError:
+ quality_dict[key] = None
+
+ quality_dict.update(quality_group.attrs)
+ return quality_dict
diff --git a/satpy/readers/netcdf_utils.py b/satpy/readers/netcdf_utils.py
index 54bc332f20..40beb21cf4 100644
--- a/satpy/readers/netcdf_utils.py
+++ b/satpy/readers/netcdf_utils.py
@@ -17,11 +17,12 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Helpers for reading netcdf-based files."""
-import netCDF4
import logging
+
+import dask.array as da
+import netCDF4
import numpy as np
import xarray as xr
-import dask.array as da
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
@@ -51,6 +52,10 @@ class NetCDF4FileHandler(BaseFileHandler):
wrapper["/attr/platform_short_name"]
+    Or for all of the global attributes:
+
+ wrapper["/attrs"]
+
Note that loading datasets requires reopening the original file
(unless those datasets are cached, see below), but to get just the
shape of the dataset append "/shape" to the item string:
@@ -128,15 +133,29 @@ def __del__(self):
except RuntimeError: # presumably closed already
pass
+ def _collect_global_attrs(self, obj):
+ """Collect all the global attributes for the provided file object."""
+ global_attrs = {}
+ for key in obj.ncattrs():
+ fc_key = f"/attr/{key}"
+ value = self._get_attr_value(obj, key)
+ self.file_content[fc_key] = global_attrs[key] = value
+ self.file_content["/attrs"] = global_attrs
+
def _collect_attrs(self, name, obj):
"""Collect all the attributes for the provided file object."""
for key in obj.ncattrs():
- value = getattr(obj, key)
- fc_key = "{}/attr/{}".format(name, key)
- try:
- self.file_content[fc_key] = np2str(value)
- except ValueError:
- self.file_content[fc_key] = value
+ fc_key = f"{name}/attr/{key}"
+ value = self._get_attr_value(obj, key)
+ self.file_content[fc_key] = value
+
+ def _get_attr_value(self, obj, key):
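+        """Get the attribute ``key`` from ``obj``, decoding numpy string values to ``str`` when possible."""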
+ value = getattr(obj, key)
+ try:
+ value = np2str(value)
+ except ValueError:
+ pass
+ return value
def collect_metadata(self, name, obj):
"""Collect all file variables and attributes for the provided file object.
@@ -145,11 +164,21 @@ def collect_metadata(self, name, obj):
"""
# Look through each subgroup
base_name = name + "/" if name else ""
+ self._collect_groups_info(base_name, obj)
+ self._collect_variables_info(base_name, obj)
+ if not name:
+ self._collect_global_attrs(obj)
+ else:
+ self._collect_attrs(name, obj)
+
+ def _collect_groups_info(self, base_name, obj):
for group_name, group_obj in obj.groups.items():
full_group_name = base_name + group_name
self.file_content[full_group_name] = group_obj
self._collect_attrs(full_group_name, group_obj)
self.collect_metadata(full_group_name, group_obj)
+
+ def _collect_variables_info(self, base_name, obj):
for var_name, var_obj in obj.variables.items():
var_name = base_name + var_name
self.file_content[var_name] = var_obj
@@ -157,7 +186,6 @@ def collect_metadata(self, name, obj):
self.file_content[var_name + "/shape"] = var_obj.shape
self.file_content[var_name + "/dimensions"] = var_obj.dimensions
self._collect_attrs(var_name, var_obj)
- self._collect_attrs(name, obj)
def collect_dimensions(self, name, obj):
"""Collect dimensions."""
@@ -189,25 +217,35 @@ def __getitem__(self, key):
"""Get item for given key."""
val = self.file_content[key]
if isinstance(val, netCDF4.Variable):
- if key in self.cached_file_content:
- return self.cached_file_content[key]
- # these datasets are closed and inaccessible when the file is
- # closed, need to reopen
- # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4
- parts = key.rsplit('/', 1)
- if len(parts) == 2:
- group, key = parts
- else:
- group = None
- if self.file_handle is not None:
- val = self._get_var_from_filehandle(group, key)
- else:
- val = self._get_var_from_xr(group, key)
- elif isinstance(val, netCDF4.Group):
- # Full groups are conveniently read with xr even if file_handle is available
- with xr.open_dataset(self.filename, group=key,
- **self._xarray_kwargs) as nc:
- val = nc
+ return self._get_variable(key, val)
+ if isinstance(val, netCDF4.Group):
+ return self._get_group(key, val)
+ return val
+
+ def _get_variable(self, key, val):
+ """Get a variable from the netcdf file."""
+ if key in self.cached_file_content:
+ return self.cached_file_content[key]
+ # these datasets are closed and inaccessible when the file is
+ # closed, need to reopen
+ # TODO: Handle HDF4 versus NetCDF3 versus NetCDF4
+ parts = key.rsplit('/', 1)
+ if len(parts) == 2:
+ group, key = parts
+ else:
+ group = None
+ if self.file_handle is not None:
+ val = self._get_var_from_filehandle(group, key)
+ else:
+ val = self._get_var_from_xr(group, key)
+ return val
+
+ def _get_group(self, key, val):
+ """Get a group from the netcdf file."""
+ # Full groups are conveniently read with xr even if file_handle is available
+ with xr.open_dataset(self.filename, group=key,
+ **self._xarray_kwargs) as nc:
+ val = nc
return val
def _get_var_from_xr(self, group, key):
diff --git a/satpy/readers/nucaps.py b/satpy/readers/nucaps.py
index 6ed1ea48b9..216581f7b3 100644
--- a/satpy/readers/nucaps.py
+++ b/satpy/readers/nucaps.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2016 Satpy developers
+# Copyright (c) 2016-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -33,14 +33,15 @@
"""
-import xarray as xr
-import pandas as pd
-import numpy as np
import logging
from collections import defaultdict
-from satpy.readers.yaml_reader import FileYAMLReader
+import numpy as np
+import pandas as pd
+import xarray as xr
+
from satpy.readers.netcdf_utils import NetCDF4FileHandler
+from satpy.readers.yaml_reader import FileYAMLReader
LOG = logging.getLogger(__name__)
@@ -133,14 +134,12 @@ def sensor_names(self):
"""Return standard sensor or instrument name for the file's data."""
try:
res = self['/attr/instrument_name']
- if isinstance(res, np.ndarray):
- res = str(res.astype(str))
res = [x.strip() for x in res.split(',')]
if len(res) == 1:
- return res[0]
- return res
+ return res[0].lower()
except KeyError:
- return ['CrIS', 'ATMS', 'VIIRS']
+ res = ['CrIS', 'ATMS', 'VIIRS']
+ return set(name.lower() for name in res)
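A standalone sketch of the new parsing behaviour (the attribute value is
hypothetical):

    res = [x.strip() for x in "CrIS, ATMS, VIIRS".split(',')]
    # several instruments -> a set of lowercase names: {"cris", "atms", "viirs"}
    # a single instrument -> the bare lowercase string, e.g. "cris"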
def get_shape(self, ds_id, ds_info):
"""Return data array shape for item specified."""
@@ -209,7 +208,7 @@ def get_dataset(self, dataset_id, ds_info):
if 'Number_of_CrIS_FORs' in sp.dims:
sp = sp.rename({'Number_of_CrIS_FORs': 'y'})
if 'surface_pressure' in ds_info:
- ds_info['surface_pressure'] = xr.concat((ds_info['surface_pressure'], sp))
+ ds_info['surface_pressure'] = xr.concat((ds_info['surface_pressure'], sp), dim='y')
else:
ds_info['surface_pressure'] = sp
# include all the pressure levels
@@ -221,6 +220,9 @@ def get_dataset(self, dataset_id, ds_info):
data = data.where((data <= valid_max)) # | (data >= valid_min))
if fill_value is not None:
data = data.where(data != fill_value)
+ # this _FillValue is no longer valid
+ metadata.pop('_FillValue', None)
+ data.attrs.pop('_FillValue', None)
data.attrs.update(metadata)
# Older format
@@ -288,25 +290,7 @@ def load(self, dataset_keys, previous_datasets=None, pressure_levels=None):
"""
dataset_keys = set(self.get_dataset_key(x) for x in dataset_keys)
if pressure_levels is not None:
- # Filter out datasets that don't fit in the correct pressure level
- for ds_id in dataset_keys.copy():
- ds_info = self.all_ids[ds_id]
- ds_level = ds_info.get("pressure_level")
- if ds_level is not None:
- if pressure_levels is True:
- # they want all pressure levels
- continue
- elif len(pressure_levels) == 2 and pressure_levels[0] <= ds_level <= pressure_levels[1]:
- # given a min and a max pressure level
- continue
- elif np.isclose(pressure_levels, ds_level).any():
- # they asked for this specific pressure level
- continue
- else:
- # they don't want this dataset at this pressure level
- LOG.debug("Removing dataset to load: %s", ds_id)
- dataset_keys.remove(ds_id)
- continue
+ self._filter_dataset_keys_outside_pressure_levels(dataset_keys, pressure_levels)
# Add pressure levels to the datasets to load if needed so
# we can do further filtering after loading
@@ -325,66 +309,101 @@ def load(self, dataset_keys, previous_datasets=None, pressure_levels=None):
dataset_keys.remove(plevels_ds_id)
else:
plevels_ds = datasets_loaded[plevels_ds_id]
+ _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels)
- if pressure_levels is True:
- cond = None
- elif len(pressure_levels) == 2:
- cond = (plevels_ds >= pressure_levels[0]) & (plevels_ds <= pressure_levels[1])
- else:
- cond = plevels_ds == pressure_levels
- if cond is not None:
- new_plevels = plevels_ds.where(cond, drop=True)
- else:
- new_plevels = plevels_ds
+ if self.mask_surface:
+ _mask_data_below_surface_pressure(datasets_loaded, dataset_keys)
- for ds_id in datasets_loaded.keys():
- ds_obj = datasets_loaded[ds_id]
- if plevels_ds.dims[0] not in ds_obj.dims:
- continue
+ if self.mask_quality:
+ _mask_data_with_quality_flag(datasets_loaded, dataset_keys)
- if cond is not None:
- datasets_loaded[ds_id] = ds_obj.where(cond, drop=True)
- datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels
+ return datasets_loaded
- if self.mask_surface:
- LOG.debug("Filtering pressure levels at or below the surface pressure")
- for ds_id in sorted(dataset_keys):
- ds = datasets_loaded[ds_id]
- if "surface_pressure" not in ds.attrs or "pressure_levels" not in ds.attrs:
+ def _filter_dataset_keys_outside_pressure_levels(self, dataset_keys, pressure_levels):
+ for ds_id in dataset_keys.copy():
+ ds_info = self.all_ids[ds_id]
+ ds_level = ds_info.get("pressure_level")
+ if ds_level is not None:
+ if pressure_levels is True:
+ # they want all pressure levels
+ continue
+ elif len(pressure_levels) == 2 and pressure_levels[0] <= ds_level <= pressure_levels[1]:
+ # given a min and a max pressure level
+ continue
+ elif np.isclose(pressure_levels, ds_level).any():
+ # they asked for this specific pressure level
continue
- data_pressure = ds.attrs["pressure_levels"]
- surface_pressure = ds.attrs["surface_pressure"]
- if isinstance(surface_pressure, float):
- # scalar needs to become array for each record
- surface_pressure = np.repeat(surface_pressure, ds.shape[0])
- if surface_pressure.ndim == 1 and surface_pressure.shape[0] == ds.shape[0]:
- # surface is one element per record
- LOG.debug("Filtering %s at and below the surface pressure", ds_id)
- if ds.ndim == 2:
- surface_pressure = np.repeat(surface_pressure[:, None], data_pressure.shape[0], axis=1)
- data_pressure = np.repeat(data_pressure[None, :], surface_pressure.shape[0], axis=0)
- datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure)
- else:
- # entire dataset represents one pressure level
- data_pressure = ds.attrs["pressure_level"]
- datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure)
else:
- LOG.warning("Not sure how to handle shape of 'surface_pressure' metadata")
-
- if self.mask_quality:
- LOG.debug("Filtering data based on quality flags")
- for ds_id in sorted(dataset_keys):
- ds = datasets_loaded[ds_id]
- quality_flag = [
- x for x in ds.attrs.get('ancillary_variables', [])
- if x.attrs.get('name') == 'Quality_Flag']
- if not quality_flag:
+ # they don't want this dataset at this pressure level
+ LOG.debug("Removing dataset to load: %s", ds_id)
+ dataset_keys.remove(ds_id)
continue
- quality_flag = quality_flag[0]
- if quality_flag.dims[0] not in ds.dims:
- continue
- LOG.debug("Masking %s where quality flag doesn't equal 1", ds_id)
- datasets_loaded[ds_id] = ds.where(quality_flag == 0)
- return datasets_loaded
+def _remove_data_at_pressure_levels(datasets_loaded, plevels_ds, pressure_levels):
+ cond = _get_pressure_level_condition(plevels_ds, pressure_levels)
+ if cond is not None:
+ new_plevels = plevels_ds.where(cond, drop=True)
+ else:
+ new_plevels = plevels_ds
+ for ds_id in datasets_loaded.keys():
+ ds_obj = datasets_loaded[ds_id]
+ if plevels_ds.dims[0] not in ds_obj.dims:
+ continue
+
+ if cond is not None:
+ datasets_loaded[ds_id] = ds_obj.where(cond, drop=True)
+ datasets_loaded[ds_id].attrs['pressure_levels'] = new_plevels
+
+
+def _get_pressure_level_condition(plevels_ds, pressure_levels):
+ if pressure_levels is True:
+ cond = None
+ elif len(pressure_levels) == 2:
+ cond = (plevels_ds >= pressure_levels[0]) & (plevels_ds <= pressure_levels[1])
+ else:
+ cond = plevels_ds == pressure_levels
+ return cond
+
+
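The three accepted forms of `pressure_levels`, shown against a hypothetical
pressure axis (illustration only):

    import xarray as xr

    plevels = xr.DataArray([100., 300., 500., 850.], dims=("Pressure_Levels",))
    _get_pressure_level_condition(plevels, True)          # None: keep every level
    _get_pressure_level_condition(plevels, (300., 850.))  # length 2: min/max range
    _get_pressure_level_condition(plevels, [500.])        # otherwise: exact match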
+def _mask_data_below_surface_pressure(datasets_loaded, dataset_keys):
+ LOG.debug("Filtering pressure levels at or below the surface pressure")
+ for ds_id in sorted(dataset_keys):
+ ds = datasets_loaded[ds_id]
+ if "surface_pressure" not in ds.attrs or "pressure_levels" not in ds.attrs:
+ continue
+ data_pressure = ds.attrs["pressure_levels"]
+ surface_pressure = ds.attrs["surface_pressure"]
+ if isinstance(surface_pressure, float):
+ # scalar needs to become array for each record
+ surface_pressure = np.repeat(surface_pressure, ds.shape[0])
+ if surface_pressure.ndim == 1 and surface_pressure.shape[0] == ds.shape[0]:
+ # surface is one element per record
+ LOG.debug("Filtering %s at and below the surface pressure", ds_id)
+ if ds.ndim == 2:
+ surface_pressure = np.repeat(surface_pressure[:, None], data_pressure.shape[0], axis=1)
+ data_pressure = np.repeat(data_pressure[None, :], surface_pressure.shape[0], axis=0)
+ datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure)
+ else:
+ # entire dataset represents one pressure level
+ data_pressure = ds.attrs["pressure_level"]
+ datasets_loaded[ds_id] = ds.where(data_pressure < surface_pressure)
+ else:
+ LOG.warning("Not sure how to handle shape of 'surface_pressure' metadata")
+
+
+def _mask_data_with_quality_flag(datasets_loaded, dataset_keys):
+ LOG.debug("Filtering data based on quality flags")
+ for ds_id in sorted(dataset_keys):
+ ds = datasets_loaded[ds_id]
+ quality_flag = [
+ x for x in ds.attrs.get('ancillary_variables', [])
+ if x.attrs.get('name') == 'Quality_Flag']
+ if not quality_flag:
+ continue
+
+ quality_flag = quality_flag[0]
+ if quality_flag.dims[0] not in ds.dims:
+ continue
+ LOG.debug("Masking %s where quality flag doesn't equal 1", ds_id)
+ datasets_loaded[ds_id] = ds.where(quality_flag == 0)
diff --git a/satpy/readers/nwcsaf_msg2013_hdf5.py b/satpy/readers/nwcsaf_msg2013_hdf5.py
index 3ce07d859d..a8fdf45f3c 100644
--- a/satpy/readers/nwcsaf_msg2013_hdf5.py
+++ b/satpy/readers/nwcsaf_msg2013_hdf5.py
@@ -29,10 +29,12 @@
import logging
from datetime import datetime
+
+import h5py
import numpy as np
-from satpy.readers.hdf5_utils import HDF5FileHandler
from pyresample.geometry import AreaDefinition
-import h5py
+
+from satpy.readers.hdf5_utils import HDF5FileHandler
logger = logging.getLogger(__name__)
diff --git a/satpy/readers/nwcsaf_nc.py b/satpy/readers/nwcsaf_nc.py
index 0b1babd319..9bdafbec7c 100644
--- a/satpy/readers/nwcsaf_nc.py
+++ b/satpy/readers/nwcsaf_nc.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2017-2020 Satpy developers
+# Copyright (c) 2017-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -22,6 +22,7 @@
"""
+import functools
import logging
import os
from datetime import datetime
@@ -29,9 +30,9 @@
import dask.array as da
import numpy as np
import xarray as xr
-
-from pyresample.geometry import AreaDefinition
from pyproj import CRS
+from pyresample.geometry import AreaDefinition
+
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
from satpy.readers.utils import unzip_file
@@ -89,6 +90,7 @@ def __init__(self, filename, filename_info, filetype_info):
self.pps = False
self.platform_name = None
self.sensor = None
+ self.file_key_prefix = filetype_info.get("file_key_prefix", "")
try:
# NWCSAF/Geo:
@@ -102,6 +104,10 @@ def __init__(self, filename, filename_info, filetype_info):
self.set_platform_and_sensor(**kwrgs)
+ self.upsample_geolocation = functools.lru_cache(maxsize=1)(
+ self._upsample_geolocation_uncached
+ )
+
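Wrapping the unbound method per instance, rather than decorating it with
functools.lru_cache at class level, keeps the cache local to each file handler,
so the interpolated geolocation is released together with the handler. A
minimal sketch of the pattern:

    import functools

    class Handler:
        def __init__(self):
            self.compute = functools.lru_cache(maxsize=1)(self._compute_uncached)

        def _compute_uncached(self):
            print("computed once")
            return 42

    h = Handler()
    h.compute(); h.compute()  # "computed once" is printed a single time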
def set_platform_and_sensor(self, **kwargs):
"""Set some metadata: platform_name, sensors, and pps (identifying PPS or Geo)."""
try:
@@ -130,22 +136,37 @@ def get_dataset(self, dsid, info):
logger.debug('Get the data set from cache: %s.', dsid_name)
return self.cache[dsid_name]
if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc:
- dsid_name = dsid_name + '_reduced'
+ # Get full resolution lon,lat from the reduced (tie points) grid
+ lon, lat = self.upsample_geolocation()
+ if dsid_name == "lon":
+ return lon
+ else:
+ return lat
logger.debug('Reading %s.', dsid_name)
- variable = self.nc[dsid_name]
+ file_key = self._get_filekeys(dsid_name, info)
+ variable = self.nc[file_key]
variable = self.remove_timedim(variable)
- variable = self.scale_dataset(dsid, variable, info)
+ variable = self.scale_dataset(variable, info)
- if dsid_name.endswith('_reduced'):
- # Get full resolution lon,lat from the reduced (tie points) grid
- self.upsample_geolocation(dsid, info)
+ return variable
- return self.cache[dsid['name']]
+ def _get_varname_in_file(self, info, info_type="file_key"):
+ if isinstance(info[info_type], list):
+ for key in info[info_type]:
+ file_key = self.file_key_prefix + key
+ if file_key in self.nc:
+ return file_key
+ return self.file_key_prefix + info[info_type]
- return variable
+ def _get_filekeys(self, dsid_name, info):
+ try:
+ file_key = self._get_varname_in_file(info, info_type="file_key")
+ except KeyError:
+ file_key = dsid_name
+ return file_key
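A sketch of the resolution order (prefix and key names hypothetical):

    info = {"file_key": ["cre", "reff"]}
    # with file_key_prefix = "cmic_", the candidates "cmic_cre" and then
    # "cmic_reff" are tried against the file; a plain string key resolves to
    # "cmic_" + key, and a missing "file_key" falls back to the dataset name.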
- def scale_dataset(self, dsid, variable, info):
+ def scale_dataset(self, variable, info):
"""Scale the data set, applying the attributes from the netCDF file.
The scale and offset attributes will then be removed from the resulting variable.
@@ -155,21 +176,7 @@ def scale_dataset(self, dsid, variable, info):
scale = variable.attrs.get('scale_factor', np.array(1))
offset = variable.attrs.get('add_offset', np.array(0))
if np.issubdtype((scale + offset).dtype, np.floating) or np.issubdtype(variable.dtype, np.floating):
- if '_FillValue' in variable.attrs:
- variable = variable.where(
- variable != variable.attrs['_FillValue'])
- variable.attrs['_FillValue'] = np.nan
- if 'valid_range' in variable.attrs:
- variable = variable.where(
- variable <= variable.attrs['valid_range'][1])
- variable = variable.where(
- variable >= variable.attrs['valid_range'][0])
- if 'valid_max' in variable.attrs:
- variable = variable.where(
- variable <= variable.attrs['valid_max'])
- if 'valid_min' in variable.attrs:
- variable = variable.where(
- variable >= variable.attrs['valid_min'])
+ variable = self._mask_variable(variable)
attrs = variable.attrs.copy()
variable = variable * scale + offset
variable.attrs = attrs
@@ -193,49 +200,72 @@ def scale_dataset(self, dsid, variable, info):
pass
if 'palette_meanings' in variable.attrs:
- if 'scale_offset_dataset' in info:
- so_dataset = self.nc[info['scale_offset_dataset']]
- scale = so_dataset.attrs['scale_factor']
- offset = so_dataset.attrs['add_offset']
- else:
- scale = 1
- offset = 0
-
- variable.attrs['palette_meanings'] = [int(val)
- for val in variable.attrs['palette_meanings'].split()]
- if variable.attrs['palette_meanings'][0] == 1:
- variable.attrs['palette_meanings'] = [0] + variable.attrs['palette_meanings']
- variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)),
- coords=variable.coords, dims=variable.dims, attrs=variable.attrs)
-
- val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True)
- variable.attrs['palette_meanings'] = val * scale + offset
- variable = variable[idx]
+ variable = self._prepare_variable_for_palette(variable, info)
if 'standard_name' in info:
variable.attrs.setdefault('standard_name', info['standard_name'])
- if self.sw_version == 'NWC/PPS version v2014' and dsid['name'] == 'ctth_alti':
+ variable = self._adjust_variable_for_legacy_software(variable)
+
+ return variable
+
+ @staticmethod
+ def _mask_variable(variable):
+ if '_FillValue' in variable.attrs:
+ variable = variable.where(
+ variable != variable.attrs['_FillValue'])
+ variable.attrs['_FillValue'] = np.nan
+ if 'valid_range' in variable.attrs:
+ variable = variable.where(
+ variable <= variable.attrs['valid_range'][1])
+ variable = variable.where(
+ variable >= variable.attrs['valid_range'][0])
+ if 'valid_max' in variable.attrs:
+ variable = variable.where(
+ variable <= variable.attrs['valid_max'])
+ if 'valid_min' in variable.attrs:
+ variable = variable.where(
+ variable >= variable.attrs['valid_min'])
+ return variable
+
+ def _prepare_variable_for_palette(self, variable, info):
+ try:
+ so_dataset = self.nc[self._get_varname_in_file(info, info_type='scale_offset_dataset')]
+ except KeyError:
+ scale = 1
+ offset = 0
+ else:
+ scale = so_dataset.attrs['scale_factor']
+ offset = so_dataset.attrs['add_offset']
+ variable.attrs['palette_meanings'] = [int(val)
+ for val in variable.attrs['palette_meanings'].split()]
+ if variable.attrs['palette_meanings'][0] == 1:
+ variable.attrs['palette_meanings'] = [0] + variable.attrs['palette_meanings']
+ variable = xr.DataArray(da.vstack((np.array(variable.attrs['fill_value_color']), variable.data)),
+ coords=variable.coords, dims=variable.dims, attrs=variable.attrs)
+ val, idx = np.unique(variable.attrs['palette_meanings'], return_index=True)
+ variable.attrs['palette_meanings'] = val * scale + offset
+ variable = variable[idx]
+ return variable
+
+ def _adjust_variable_for_legacy_software(self, variable):
+ if self.sw_version == 'NWC/PPS version v2014' and variable.attrs.get('standard_name') == 'cloud_top_altitude':
# pps 2014 valid range and palette don't match
variable.attrs['valid_range'] = (0., 9000.)
- if self.sw_version == 'NWC/PPS version v2014' and dsid['name'] == 'ctth_alti_pal':
+ if (self.sw_version == 'NWC/PPS version v2014' and
+ variable.attrs.get('long_name') == 'RGB Palette for ctth_alti'):
# pps 2014 palette has the nodata color (black) first
variable = variable[1:, :]
- if self.sw_version == 'NWC/GEO version v2016' and dsid['name'] == 'ctth_alti':
- # Geo 2016/18 valid range and palette don't match
- # Valid range is 0 to 27000 in the file. But after scaling the valid range becomes -2000 to 25000
- # This now fixed by the scaling of the valid range above.
- pass
-
return variable
- def upsample_geolocation(self, dsid, info):
+ def _upsample_geolocation_uncached(self):
"""Upsample the geolocation (lon,lat) from the tiepoint grid."""
from geotiepoints import SatelliteInterpolator
+
# Read the fields needed:
col_indices = self.nc['nx_reduced'].values
row_indices = self.nc['ny_reduced'].values
- lat_reduced = self.scale_dataset(dsid, self.nc['lat_reduced'], info)
- lon_reduced = self.scale_dataset(dsid, self.nc['lon_reduced'], info)
+ lat_reduced = self.scale_dataset(self.nc['lat_reduced'], {})
+ lon_reduced = self.scale_dataset(self.nc['lon_reduced'], {})
shape = (self.nc['y'].shape[0], self.nc['x'].shape[0])
cols_full = np.arange(shape[1])
@@ -247,10 +277,9 @@ def upsample_geolocation(self, dsid, info):
(rows_full, cols_full))
lons, lats = satint.interpolate()
- self.cache['lon'] = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x'])
- self.cache['lat'] = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x'])
-
- return
+ lon = xr.DataArray(lons, attrs=lon_reduced.attrs, dims=['y', 'x'])
+ lat = xr.DataArray(lats, attrs=lat_reduced.attrs, dims=['y', 'x'])
+ return lon, lat
def get_area_def(self, dsid):
"""Get the area definition of the datasets in the file.
diff --git a/satpy/readers/oceancolorcci_l3_nc.py b/satpy/readers/oceancolorcci_l3_nc.py
new file mode 100644
index 0000000000..9754fb020f
--- /dev/null
+++ b/satpy/readers/oceancolorcci_l3_nc.py
@@ -0,0 +1,116 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+
+"""Reader for files produced by ESA's Ocean Color CCI project.
+
+This reader currently supports the lat/lon gridded products; products on a
+sinusoidal grid are not yet supported. All compositing periods (1, 5 and 8 day,
+plus monthly) are supported, as are both the merged product files (OC_PRODUCTS)
+and the single-product files (RRS, CHLOR_A, IOP, K_490).
+"""
+import logging
+from datetime import datetime
+
+import dask.array as da
+import numpy as np
+from pyresample import geometry
+
+from satpy.readers.netcdf_utils import NetCDF4FileHandler
+
+logger = logging.getLogger(__name__)
+
+
+class OCCCIFileHandler(NetCDF4FileHandler):
+ """File handler for Ocean Color CCI netCDF files."""
+
+ @staticmethod
+ def _parse_datetime(datestr):
+ """Parse datetime."""
+ return datetime.strptime(datestr, "%Y%m%d%H%MZ")
+
+ @property
+ def start_time(self):
+ """Get the start time."""
+ return self._parse_datetime(self['/attr/time_coverage_start'])
+
+ @property
+ def end_time(self):
+ """Get the end time."""
+ return self._parse_datetime(self['/attr/time_coverage_end'])
+
+ @property
+ def composite_period(self):
+ """Determine composite period from filename information."""
+ comp1 = self.filename_info['composite_period_1']
+ comp2 = self.filename_info['composite_period_2']
+ if comp2 == 'MONTHLY' and comp1 == "1M":
+ return 'monthly'
+ elif comp1 == '1D':
+ return 'daily'
+ elif comp1 == '5D':
+ return '5-day'
+ elif comp1 == '8D':
+ return '8-day'
+ else:
+ raise ValueError(f"Unknown data compositing period: {comp1}_{comp2}")
+
+ def _update_attrs(self, dataset, dataset_info):
+ """Update dataset attributes."""
+ dataset.attrs.update(self[dataset_info['nc_key']].attrs)
+ dataset.attrs.update(dataset_info)
+ dataset.attrs['sensor'] = 'merged'
+ dataset.attrs['composite_period'] = self.composite_period
+ # remove attributes from original file which don't apply anymore
+ dataset.attrs.pop("nc_key")
+
+ def get_dataset(self, dataset_id, ds_info):
+ """Get dataset."""
+ dataset = da.squeeze(self[ds_info['nc_key']])
+ if '_FillValue' in dataset.attrs:
+ dataset.data = da.where(dataset.data == dataset.attrs['_FillValue'], np.nan, dataset.data)
+ self._update_attrs(dataset, ds_info)
+ return dataset
+
+ def get_area_def(self, dsid):
+ """Get the area definition based on information in file.
+
+ There is no area definition in the file itself, so we have to compute it
+ from the metadata, which specifies the area extent and pixel resolution.
+ """
+ proj_param = 'EPSG:4326'
+
+ lon_res = float(self['/attr/geospatial_lon_resolution'])
+ lat_res = float(self['/attr/geospatial_lat_resolution'])
+
+ min_lon = self['/attr/geospatial_lon_min']
+ max_lon = self['/attr/geospatial_lon_max']
+ min_lat = self['/attr/geospatial_lat_min']
+ max_lat = self['/attr/geospatial_lat_max']
+
+ area_extent = (min_lon, min_lat, max_lon, max_lat)
+ lon_size = np.round((max_lon - min_lon) / lon_res).astype(int)
+ lat_size = np.round((max_lat - min_lat) / lat_res).astype(int)
+
+ area = geometry.AreaDefinition('gridded_occci',
+ 'Full globe gridded area',
+ 'longlat',
+ proj_param,
+ lon_size,
+ lat_size,
+ area_extent)
+ return area
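Worked numbers for a hypothetical global grid at 1/24 degree (roughly 4 km)
resolution, following the computation above:

    import numpy as np

    lon_res = lat_res = 1.0 / 24.0
    lon_size = np.round((180.0 - -180.0) / lon_res).astype(int)  # 8640
    lat_size = np.round((90.0 - -90.0) / lat_res).astype(int)    # 4320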
diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py
index 04632b7a61..ae0bcd8366 100644
--- a/satpy/readers/olci_nc.py
+++ b/satpy/readers/olci_nc.py
@@ -48,12 +48,11 @@
import xarray as xr
from satpy import CHUNK_SIZE
+from satpy._compat import cached_property
from satpy.readers import open_file_or_filename
from satpy.readers.file_handlers import BaseFileHandler
from satpy.utils import angle2xyz, xyz2angle
-from satpy._compat import cached_property
-
logger = logging.getLogger(__name__)
PLATFORM_NAMES = {'S3A': 'Sentinel-3A',
@@ -97,9 +96,13 @@ def __getitem__(self, item):
class NCOLCIBase(BaseFileHandler):
"""The OLCI reader base."""
- def __init__(self, filename, filename_info, filetype_info, engine=None):
+ rows_name = "rows"
+ cols_name = "columns"
+
+ def __init__(self, filename, filename_info, filetype_info,
+ engine=None):
"""Init the olci reader base."""
- super(NCOLCIBase, self).__init__(filename, filename_info, filetype_info)
+ super().__init__(filename, filename_info, filetype_info)
self._engine = engine
self._start_time = filename_info['start_time']
self._end_time = filename_info['end_time']
@@ -116,9 +119,9 @@ def nc(self):
decode_cf=True,
mask_and_scale=True,
engine=self._engine,
- chunks={'columns': CHUNK_SIZE,
- 'rows': CHUNK_SIZE})
- return dataset.rename({'columns': 'x', 'rows': 'y'})
+ chunks={self.cols_name: CHUNK_SIZE,
+ self.rows_name: CHUNK_SIZE})
+ return dataset.rename({self.cols_name: 'x', self.rows_name: 'y'})
@property
def start_time(self):
@@ -139,29 +142,24 @@ def get_dataset(self, key, info):
def __del__(self):
"""Close the NetCDF file that may still be open."""
- with suppress(IOError, OSError, AttributeError):
+ with suppress(IOError, OSError, AttributeError, TypeError):
self.nc.close()
class NCOLCICal(NCOLCIBase):
"""Dummy class for calibration."""
- pass
-
class NCOLCIGeo(NCOLCIBase):
"""Dummy class for navigation."""
- pass
-
class NCOLCIChannelBase(NCOLCIBase):
"""Base class for channel reading."""
def __init__(self, filename, filename_info, filetype_info, engine=None):
"""Init the file handler."""
- super(NCOLCIChannelBase, self).__init__(filename, filename_info, filetype_info)
-
+ super().__init__(filename, filename_info, filetype_info, engine)
self.channel = filename_info.get('dataset_name')
self.reflectance_prefix = 'Oa'
self.reflectance_suffix = '_reflectance'
@@ -172,8 +170,7 @@ class NCOLCI1B(NCOLCIChannelBase):
def __init__(self, filename, filename_info, filetype_info, cal, engine=None):
"""Init the file handler."""
- super(NCOLCI1B, self).__init__(filename, filename_info,
- filetype_info)
+ super().__init__(filename, filename_info, filetype_info, engine)
self.cal = cal.nc
@staticmethod
@@ -242,32 +239,18 @@ def getbitmask(self, wqsf, items=None):
return reduce(np.logical_or, [bflags[item] for item in items])
-class NCOLCILowResData(BaseFileHandler):
+class NCOLCILowResData(NCOLCIBase):
"""Handler for low resolution data."""
- def __init__(self, filename, filename_info, filetype_info, engine=None):
- """Init the file handler."""
- super(NCOLCILowResData, self).__init__(filename, filename_info, filetype_info)
- self.nc = None
- # TODO: get metadata from the manifest file (xfdumanifest.xml)
- self.platform_name = PLATFORM_NAMES[filename_info['mission_id']]
- self.sensor = 'olci'
- self.cache = {}
- self.engine = engine
-
- def _open_dataset(self):
- if self.nc is None:
- self.nc = xr.open_dataset(self.filename,
- decode_cf=True,
- mask_and_scale=True,
- engine=self.engine,
- chunks={'tie_columns': CHUNK_SIZE,
- 'tie_rows': CHUNK_SIZE})
-
- self.nc = self.nc.rename({'tie_columns': 'x', 'tie_rows': 'y'})
+ rows_name = "tie_rows"
+ cols_name = "tie_columns"
- self.l_step = self.nc.attrs['al_subsampling_factor']
- self.c_step = self.nc.attrs['ac_subsampling_factor']
+ def __init__(self, filename, filename_info, filetype_info,
+ engine=None):
+ """Init the file handler."""
+ super().__init__(filename, filename_info, filetype_info, engine)
+ self.l_step = self.nc.attrs['al_subsampling_factor']
+ self.c_step = self.nc.attrs['ac_subsampling_factor']
def _do_interpolate(self, data):
@@ -293,16 +276,10 @@ def _do_interpolate(self, data):
return [xr.DataArray(da.from_array(x, chunks=(CHUNK_SIZE, CHUNK_SIZE)),
dims=['y', 'x']) for x in int_data]
+ @property
def _need_interpolation(self):
return (self.c_step != 1 or self.l_step != 1)
- def __del__(self):
- """Close the NetCDF file that may still be open."""
- try:
- self.nc.close()
- except (OSError, AttributeError):
- pass
-
class NCOLCIAngles(NCOLCILowResData):
"""File handler for the OLCI angles."""
@@ -317,49 +294,22 @@ def get_dataset(self, key, info):
if key['name'] not in self.datasets:
return
- self._open_dataset()
-
logger.debug('Reading %s.', key['name'])
- if self._need_interpolation() and self.cache.get(key['name']) is None:
-
+ if self._need_interpolation:
if key['name'].startswith('satellite'):
- zen = self.nc[self.datasets['satellite_zenith_angle']]
- zattrs = zen.attrs
- azi = self.nc[self.datasets['satellite_azimuth_angle']]
- aattrs = azi.attrs
+ azi, zen = self.satellite_angles
elif key['name'].startswith('solar'):
- zen = self.nc[self.datasets['solar_zenith_angle']]
- zattrs = zen.attrs
- azi = self.nc[self.datasets['solar_azimuth_angle']]
- aattrs = azi.attrs
+ azi, zen = self.sun_angles
else:
raise NotImplementedError("Don't know how to read " + key['name'])
- x, y, z = angle2xyz(azi, zen)
-
- x, y, z = self._do_interpolate((x, y, z))
-
- azi, zen = xyz2angle(x, y, z)
- azi.attrs = aattrs
- zen.attrs = zattrs
-
if 'zenith' in key['name']:
values = zen
elif 'azimuth' in key['name']:
values = azi
else:
raise NotImplementedError("Don't know how to read " + key['name'])
-
- if key['name'].startswith('satellite'):
- self.cache['satellite_zenith_angle'] = zen
- self.cache['satellite_azimuth_angle'] = azi
- elif key['name'].startswith('solar'):
- self.cache['solar_zenith_angle'] = zen
- self.cache['solar_azimuth_angle'] = azi
-
- elif key['name'] in self.cache:
- values = self.cache[key['name']]
else:
values = self.nc[self.datasets[key['name']]]
@@ -369,12 +319,31 @@ def get_dataset(self, key, info):
values.attrs.update(key.to_dict())
return values
- def __del__(self):
- """Close the NetCDF file that may still be open."""
- try:
- self.nc.close()
- except (OSError, AttributeError):
- pass
+ @cached_property
+ def sun_angles(self):
+ """Return the sun angles."""
+ zen = self.nc[self.datasets['solar_zenith_angle']]
+ azi = self.nc[self.datasets['solar_azimuth_angle']]
+ azi, zen = self._interpolate_angles(azi, zen)
+ return azi, zen
+
+ @cached_property
+ def satellite_angles(self):
+ """Return the satellite angles."""
+ zen = self.nc[self.datasets['satellite_zenith_angle']]
+ azi = self.nc[self.datasets['satellite_azimuth_angle']]
+ azi, zen = self._interpolate_angles(azi, zen)
+ return azi, zen
+
+ def _interpolate_angles(self, azi, zen):
+ aattrs = azi.attrs
+ zattrs = zen.attrs
+ x, y, z = angle2xyz(azi, zen)
+ x, y, z = self._do_interpolate((x, y, z))
+ azi, zen = xyz2angle(x, y, z)
+ azi.attrs = aattrs
+ zen.attrs = zattrs
+ return azi, zen
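Interpolating azimuths directly would average across the 0/360 degree
discontinuity; going through Cartesian components sidesteps that. A minimal
numeric illustration (not satpy code):

    import numpy as np

    az = np.deg2rad([350.0, 10.0])
    x, y = np.sin(az), np.cos(az)
    np.rad2deg(np.arctan2(x.mean(), y.mean())) % 360  # 0.0, not the naive 180.0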
class NCOLCIMeteo(NCOLCILowResData):
@@ -382,6 +351,12 @@ class NCOLCIMeteo(NCOLCILowResData):
datasets = ['humidity', 'sea_level_pressure', 'total_columnar_water_vapour', 'total_ozone']
+ def __init__(self, filename, filename_info, filetype_info,
+ engine=None):
+ """Init the file handler."""
+ super().__init__(filename, filename_info, filetype_info, engine)
+ self.cache = {}
+
# TODO: the following depends on more than columns, rows
# float atmospheric_temperature_profile(tie_rows, tie_columns, tie_pressure_levels) ;
# float horizontal_wind(tie_rows, tie_columns, wind_vectors) ;
@@ -392,11 +367,9 @@ def get_dataset(self, key, info):
if key['name'] not in self.datasets:
return
- self._open_dataset()
-
logger.debug('Reading %s.', key['name'])
- if self._need_interpolation() and self.cache.get(key['name']) is None:
+ if self._need_interpolation and self.cache.get(key['name']) is None:
data = self.nc[key['name']]
diff --git a/satpy/readers/omps_edr.py b/satpy/readers/omps_edr.py
index 55b591c0ca..9de71d4efa 100644
--- a/satpy/readers/omps_edr.py
+++ b/satpy/readers/omps_edr.py
@@ -16,9 +16,10 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface to OMPS EDR format."""
+import logging
from datetime import datetime, timedelta
+
import numpy as np
-import logging
from satpy.readers.hdf5_utils import HDF5FileHandler
@@ -77,7 +78,7 @@ def get_metadata(self, dataset_id, ds_info):
file_units = self.get(var_path + '/attr/units', self.get(var_path + '/attr/Units'))
if file_units is None:
raise KeyError("File variable '{}' has no units attribute".format(var_path))
- elif file_units == 'deg':
+ if file_units == 'deg':
file_units = 'degrees'
elif file_units == 'Unitless':
file_units = '1'
diff --git a/satpy/readers/pmw_channels_definitions.py b/satpy/readers/pmw_channels_definitions.py
new file mode 100644
index 0000000000..f1c10b8459
--- /dev/null
+++ b/satpy/readers/pmw_channels_definitions.py
@@ -0,0 +1,407 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2022 Satpy Developers
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Passive Microwave instrument and channel specific features."""
+
+import numbers
+from contextlib import suppress
+from typing import NamedTuple
+
+import numpy as np
+
+
+class FrequencyBandBaseArithmetics:
+ """Mixin class with basic frequency comparison operations."""
+
+ def __lt__(self, other):
+ """Compare to another frequency."""
+ if other is None:
+ return False
+ return super().__lt__(other)
+
+ def __gt__(self, other):
+ """Compare to another frequency."""
+ if other is None:
+ return True
+ return super().__gt__(other)
+
+ @classmethod
+ def convert(cls, frq):
+ """Convert `frq` to this type if possible."""
+ if isinstance(frq, dict):
+ return cls(**frq)
+ return frq
+
+
+class FrequencyQuadrupleSideBandBase(NamedTuple):
+ """Base class for a frequency quadruple side band.
+
+ Frequency Quadruple Side Band is supposed to describe the special type of
+ bands commonly used in temperature sounding from Passive Microwave
+ Sensors. When the absorption band being observed is symmetrical it is
+ advantageous (giving better NeDT) to sense in a band both right and left of
+ the central absorption frequency. But to avoid (CO2) absorption lines
+ symmetrically positioned on each side of the main absorption band it is
+ common to split the side bands in two 'side-side' bands.
+
+ This is needed because of this bug: https://bugs.python.org/issue41629
+
+ """
+
+ central: float
+ side: float
+ sideside: float
+ bandwidth: float
+ unit: str = "GHz"
+
+
+class FrequencyQuadrupleSideBand(FrequencyBandBaseArithmetics, FrequencyQuadrupleSideBandBase):
+ """The frequency quadruple side band class.
+
+ The elements of the quadruple-side-band type frequency band are the
+ central frequency, the relative (main) side band frequency (relative to the
+ center - left and right), the sub-side band frequency (relative to the
+ offset side-band(s)) and their bandwidths. Optionally a unit (defaults to
+ GHz) may be specified. No clever unit conversion is done here, it's just
+ used for checking that two ranges are comparable.
+
+ Frequency Quadruple Side Band is supposed to describe the special type of
+ bands commonly used in temperature sounding from Passive Microwave
+ Sensors. When the absorption band being observed is symmetrical it is
+ advantageous (giving better NeDT) to sense in a band both right and left of
+ the central absorption frequency. But to avoid (CO2) absorption lines
+ symmetrically positioned on each side of the main absorption band it is
+ common to split the side bands in two 'side-side' bands.
+
+ """
+
+ def __eq__(self, other):
+ """Return if two channel frequencies are equal.
+
+ Args:
+ other (tuple or scalar): (central frq, side band frq, side-side band frq,
+ and band width frq) or scalar frq
+
+ Return:
+ True if other is a scalar and min <= other <= max, or if other is a
+ tuple equal to self, or if other is a number contained by self.
+ False otherwise.
+
+ """
+ if other is None:
+ return False
+ if isinstance(other, numbers.Number):
+ return other in self
+ if isinstance(other, (tuple, list)) and len(other) == 4:
+ return other in self
+ return super().__eq__(other)
+
+ def __str__(self):
+ """Format for print out."""
+ return f"central={self.central} {self.unit} ±{self.side} ±{self.sideside} width={self.bandwidth} {self.unit}"
+
+ def __hash__(self):
+ """Hash this tuple."""
+ return tuple.__hash__(self)
+
+ def __contains__(self, other):
+ """Check if this quadruple-side-band 'contains' *other*."""
+ if other is None:
+ return False
+
+ # The four centrals:
+ central_left_left = self.central - self.side - self.sideside
+ central_left_right = self.central - self.side + self.sideside
+ central_right_left = self.central + self.side - self.sideside
+ central_right_right = self.central + self.side + self.sideside
+
+ four_centrals = [central_left_left, central_left_right,
+ central_right_left, central_right_right]
+ if isinstance(other, numbers.Number):
+ for central in four_centrals:
+ if _is_inside_interval(other, central, self.bandwidth):
+ return True
+
+ return False
+
+ if isinstance(other, (tuple, list)) and len(other) == 5:
+ raise NotImplementedError("Can't check if one frequency quadruple side band is contained in another.")
+
+ with suppress(AttributeError):
+ if self.unit != other.unit:
+ raise NotImplementedError("Can't compare frequency ranges with different units.")
+
+ return False
+
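A worked containment check (band values hypothetical, in GHz):
FrequencyQuadrupleSideBand(183.31, 7.0, 4.5, 3.0) senses four narrow bands
centred at 171.81, 180.81, 185.81 and 194.81, each 3 GHz wide.

    band = FrequencyQuadrupleSideBand(183.31, 7.0, 4.5, 3.0)
    181.5 in band    # True: inside 180.81 +/- 1.5
    183.31 in band   # False: the central absorption line itself is not sensed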
+ def distance(self, value):
+ """Get the distance to the quadruple side band.
+
+ Determining the distance in frequency space between two quadruple side
+ bands can be quite ambiguous, as such bands are in effect a set of 4
+ narrow bands, two on each side of the main absorption band, and on each
+ side, one on each side of the secondary absorption lines. To keep it as
+ simple as possible we have, until further notice, decided to define the
+ distance between two such bands as infinite unless they are determined to
+ be equal.
+
+ If the frequency entered is a single value, the distance will be the
+ minimum of the distances to the two outermost sides of the quadruple
+ side band.
+
+ If the frequency entered is a tuple or list and the two quadruple
+ frequency bands are contained in each other (equal) the distance will
+ always be zero.
+
+ """
+ left_left = self.central - self.side - self.sideside
+ right_right = self.central + self.side + self.sideside
+
+ if self == value:
+ try:
+ left_side_dist = abs(value.central - value.side - value.sideside - left_left)
+ right_side_dist = abs(value.central + value.side + value.sideside - right_right)
+ except AttributeError:
+ left_side_dist = abs(value - left_left)
+ right_side_dist = abs(value - right_right)
+
+ return min(left_side_dist, right_side_dist)
+ else:
+ return np.inf
+
+
+class FrequencyDoubleSideBandBase(NamedTuple):
+ """Base class for a frequency double side band.
+
+ Frequency Double Side Band is supposed to describe the special type of bands
+ commonly used in humidity sounding from Passive Microwave Sensors. When the
+ absorption band being observed is symmetrical it is advantageous (giving
+ better NeDT) to sense in a band both right and left of the central
+ absorption frequency.
+
+ This is needed because of this bug: https://bugs.python.org/issue41629
+
+ """
+
+ central: float
+ side: float
+ bandwidth: float
+ unit: str = "GHz"
+
+
+class FrequencyDoubleSideBand(FrequencyBandBaseArithmetics, FrequencyDoubleSideBandBase):
+ """The frequency double side band class.
+
+ The elements of the double-side-band type frequency band are the central
+ frequency, the relative side band frequency (relative to the center - left
+ and right) and their bandwidths, and optionally a unit (defaults to
+ GHz). No clever unit conversion is done here; it's just used for checking
+ that two ranges are comparable.
+
+ Frequency Double Side Band is supposed to describe the special type of bands
+ commonly used in humidity sounding from Passive Microwave Sensors. When the
+ absorption band being observed is symmetrical it is advantageous (giving
+ better NeDT) to sense in a band both right and left of the central
+ absorption frequency.
+
+ """
+
+ def __eq__(self, other):
+ """Return if two channel frequencies are equal.
+
+ Args:
+ other (tuple or scalar): (central frq, side band frq and band width frq) or scalar frq
+
+ Return:
+ True if other is a scalar and min <= other <= max, or if other is a
+ tuple equal to self, or if other is a number contained by self.
+ False otherwise.
+
+ """
+ if other is None:
+ return False
+ if isinstance(other, numbers.Number):
+ return other in self
+ if isinstance(other, (tuple, list)) and len(other) == 3:
+ return other in self
+ return super().__eq__(other)
+
+ def __str__(self):
+ """Format for print out."""
+ return f"central={self.central} {self.unit} ±{self.side} width={self.bandwidth} {self.unit}"
+
+ def __hash__(self):
+ """Hash this tuple."""
+ return tuple.__hash__(self)
+
+ def __contains__(self, other):
+ """Check if this double-side-band 'contains' *other*."""
+ if other is None:
+ return False
+
+ leftside = self.central - self.side
+ rightside = self.central + self.side
+
+ if isinstance(other, numbers.Number):
+ if self._check_band_contains_other((leftside, self.bandwidth), (other, 0)):
+ return True
+ return self._check_band_contains_other((rightside, self.bandwidth), (other, 0))
+
+ other_leftside, other_rightside, other_bandwidth = 0, 0, 0
+ if isinstance(other, (tuple, list)) and len(other) == 3:
+ other_leftside = other[0] - other[1]
+ other_rightside = other[0] + other[1]
+ other_bandwidth = other[2]
+ else:
+ with suppress(AttributeError):
+ if self.unit != other.unit:
+ raise NotImplementedError("Can't compare frequency ranges with different units.")
+ other_leftside = other.central - other.side
+ other_rightside = other.central + other.side
+ other_bandwidth = other.bandwidth
+
+ if self._check_band_contains_other((leftside, self.bandwidth), (other_leftside, other_bandwidth)):
+ return True
+ return self._check_band_contains_other((rightside, self.bandwidth), (other_rightside, other_bandwidth))
+
+ @staticmethod
+ def _check_band_contains_other(band, other_band):
+ """Check that a band contains another band.
+
+ A band is here defined as a tuple of a central frequency and a bandwidth.
+ """
+ central1, width1 = band
+ central_other, width_other = other_band
+
+ if ((central1 - width1/2. <= central_other - width_other/2.) and
+ (central1 + width1/2. >= central_other + width_other/2.)):
+ return True
+ return False
+
+ def distance(self, value):
+ """Get the distance to the double side band.
+
+ Determining the distance in frequency space between two double side
+ bands can be quite ambiguous, as such bands are in effect a set of 2
+ narrow bands, one on each side of the absorption line. To keep it
+ as simple as possible we have, until further notice, decided to set the
+ distance between two such bands to infinity if neither of them is
+ contained in the other.
+
+ If the frequency entered is a single value and this frequency falls
+ inside one of the side bands, the distance will be the minimum of the
+ distances to the two outermost sides of the double side band. However,
+ if such a single frequency value falls outside both of the two side
+ bands, the distance will be set to infinity.
+
+ If the frequency entered is a tuple, the distance will either be 0 (if
+ one band is contained in the other) or infinity.
+ """
+ if self == value:
+ try:
+ left_side_dist = abs(value.central - value.side - (self.central - self.side))
+ right_side_dist = abs(value.central + value.side - (self.central + self.side))
+ except AttributeError:
+ if isinstance(value, (tuple, list)):
+ return abs((value[0] - value[1]) - (self.central - self.side))
+
+ left_side_dist = abs(value - (self.central - self.side))
+ right_side_dist = abs(value - (self.central + self.side))
+
+ return min(left_side_dist, right_side_dist)
+ else:
+ return np.inf
+
+
+class FrequencyRangeBase(NamedTuple):
+ """Base class for frequency ranges.
+
+ This is needed because of this bug: https://bugs.python.org/issue41629
+ """
+
+ central: float
+ bandwidth: float
+ unit: str = "GHz"
+
+
+class FrequencyRange(FrequencyBandBaseArithmetics, FrequencyRangeBase):
+ """The Frequency range class.
+
+ The elements of the range are central and bandwidth values, and optionally
+ a unit (defaults to GHz). No clever unit conversion is done here; it's just
+ used for checking that two ranges are comparable.
+
+ This type is used for passive microwave sensors.
+
+ """
+
+ def __eq__(self, other):
+ """Check wether two channel frequencies are equal.
+
+ Args:
+ other (tuple or scalar): (central frq, band width frq) or scalar frq
+
+ Return:
+ True if other is a scalar and min <= other <= max, or if other is a
+ tuple equal to self, or if other is a number contained by self.
+ False otherwise.
+
+ """
+ if other is None:
+ return False
+ if isinstance(other, numbers.Number):
+ return other in self
+ if isinstance(other, (tuple, list)) and len(other) == 2:
+ return self[:2] == other
+ return super().__eq__(other)
+
+ def __str__(self):
+ """Format for print out."""
+ return f"central={self.central} {self.unit} width={self.bandwidth} {self.unit}"
+
+ def __hash__(self):
+ """Hash this tuple."""
+ return tuple.__hash__(self)
+
+ def __contains__(self, other):
+ """Check if this range contains *other*."""
+ if other is None:
+ return False
+ if isinstance(other, numbers.Number):
+ return self.central - self.bandwidth/2. <= other <= self.central + self.bandwidth/2.
+
+ with suppress(AttributeError):
+ if self.unit != other.unit:
+ raise NotImplementedError("Can't compare frequency ranges with different units.")
+ return (self.central - self.bandwidth/2. <= other.central - other.bandwidth/2. and
+ self.central + self.bandwidth/2. >= other.central + other.bandwidth/2.)
+ return False
+
+ def distance(self, value):
+ """Get the distance from value."""
+ if self == value:
+ try:
+ return abs(value.central - self.central)
+ except AttributeError:
+ if isinstance(value, (tuple, list)):
+ return abs(value[0] - self.central)
+ return abs(value - self.central)
+ else:
+ return np.inf
+
+
+def _is_inside_interval(value, central, width):
+ return central - width/2 <= value <= central + width/2
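A usage sketch for the simpler band types (frequencies hypothetical, in GHz):

    from satpy.readers.pmw_channels_definitions import (
        FrequencyDoubleSideBand, FrequencyRange)

    rng = FrequencyRange(89.0, 4.0)
    88.0 in rng          # True: within 89 +/- 2
    rng == (89.0, 4.0)   # True: a 2-tuple compares against (central, bandwidth)
    rng.distance(88.0)   # 1.0; values outside the band give np.inf

    dsb = FrequencyDoubleSideBand(157.0, 3.4, 1.8)
    154.0 in dsb         # True: inside the left band, 153.6 +/- 0.9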
diff --git a/satpy/readers/safe_sar_l2_ocn.py b/satpy/readers/safe_sar_l2_ocn.py
index 292794078b..927a136c9c 100644
--- a/satpy/readers/safe_sar_l2_ocn.py
+++ b/satpy/readers/safe_sar_l2_ocn.py
@@ -27,12 +27,12 @@
import logging
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy import CHUNK_SIZE
-
import numpy as np
import xarray as xr
+from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
+
logger = logging.getLogger(__name__)
@@ -80,23 +80,7 @@ def get_dataset(self, key, info):
res = self.lons
res.attrs = info
else:
- res = self.nc[key['name']]
- if key['name'] in ['owiHs', 'owiWl', 'owiDirmet']:
- res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions'])
- elif key['name'] in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']:
- res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation'])
- elif key['name'] in ['owiPolarisationName']:
- res = xr.DataArray(res, dims=['owiPolarisation'])
- elif key['name'] in ['owiCalConstObsi', 'owiCalConstInci']:
- res = xr.DataArray(res, dims=['owiIncSize'])
- elif key['name'].startswith('owi'):
- res = xr.DataArray(res, dims=['y', 'x'])
- else:
- res = xr.DataArray(res, dims=['y', 'x'])
- res.attrs.update(info)
- if '_FillValue' in res.attrs:
- res = res.where(res != res.attrs['_FillValue'])
- res.attrs['_FillValue'] = np.nan
+ res = self._get_data_channels(key, info)
if 'missionName' in self.nc.attrs:
res.attrs.update({'platform_name': self.nc.attrs['missionName']})
@@ -109,6 +93,26 @@ def get_dataset(self, key, info):
return res
+ def _get_data_channels(self, key, info):
+ res = self.nc[key['name']]
+ if key['name'] in ['owiHs', 'owiWl', 'owiDirmet']:
+ res = xr.DataArray(res, dims=['y', 'x', 'oswPartitions'])
+ elif key['name'] in ['owiNrcs', 'owiNesz', 'owiNrcsNeszCorr']:
+ res = xr.DataArray(res, dims=['y', 'x', 'oswPolarisation'])
+ elif key['name'] in ['owiPolarisationName']:
+ res = xr.DataArray(res, dims=['owiPolarisation'])
+ elif key['name'] in ['owiCalConstObsi', 'owiCalConstInci']:
+ res = xr.DataArray(res, dims=['owiIncSize'])
+ elif key['name'].startswith('owi'):
+ res = xr.DataArray(res, dims=['y', 'x'])
+ else:
+ res = xr.DataArray(res, dims=['y', 'x'])
+ res.attrs.update(info)
+ if '_FillValue' in res.attrs:
+ res = res.where(res != res.attrs['_FillValue'])
+ res.attrs['_FillValue'] = np.nan
+ return res
+
@property
def start_time(self):
"""Product start_time, parsed from the measurement file name."""
diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py
index 659fb5c424..6353a5992d 100644
--- a/satpy/readers/sar_c_safe.py
+++ b/satpy/readers/sar_c_safe.py
@@ -34,11 +34,11 @@
"""
+import functools
import logging
-import xml.etree.ElementTree as ET
-from functools import lru_cache
from threading import Lock
+import defusedxml.ElementTree as ET
import numpy as np
import rasterio
import rioxarray
@@ -80,7 +80,7 @@ def _dictify(r):
def _get_calibration_name(calibration):
"""Get the proper calibration name."""
- calibration_name = calibration.name or 'gamma'
+ calibration_name = getattr(calibration, "name", calibration) or 'gamma'
if calibration_name == 'sigma_nought':
calibration_name = 'sigmaNought'
elif calibration_name == 'beta_nought':
@@ -108,37 +108,102 @@ def __init__(self, filename, filename_info, filetype_info,
self._image_shape = (self.hdr['product']['imageAnnotation']['imageInformation']['numberOfLines'],
self.hdr['product']['imageAnnotation']['imageInformation']['numberOfSamples'])
- self.azimuth_noise_reader = AzimuthNoiseReader(self.filename, self._image_shape)
-
def get_metadata(self):
"""Convert the xml metadata to dict."""
return dictify(self.root.getroot())
- def get_dataset(self, key, info):
+ @property
+ def start_time(self):
+ """Get the start time."""
+ return self._start_time
+
+ @property
+ def end_time(self):
+ """Get the end time."""
+ return self._end_time
+
+
+class SAFEXMLAnnotation(SAFEXML):
+ """XML file reader for the SAFE format, Annotation file."""
+
+ def __init__(self, filename, filename_info, filetype_info,
+ header_file=None):
+ """Init the XML annotation reader."""
+ super().__init__(filename, filename_info, filetype_info, header_file)
+ self.get_incidence_angle = functools.lru_cache(maxsize=10)(
+ self._get_incidence_angle_uncached
+ )
+
+ def get_dataset(self, key, info, chunks=None):
"""Load a dataset."""
if self._polarization != key["polarization"]:
return
- xml_items = info['xml_item']
- xml_tags = info['xml_tag']
+ if key["name"] == "incidence_angle":
+ return self.get_incidence_angle(chunks=chunks or CHUNK_SIZE)
+
+ def _get_incidence_angle_uncached(self, chunks):
+ """Get the incidence angle array."""
+ incidence_angle = XMLArray(self.root, ".//geolocationGridPoint", "incidenceAngle")
+ return incidence_angle.expand(self._image_shape, chunks=chunks)
- if not isinstance(xml_items, list):
- xml_items = [xml_items]
- xml_tags = [xml_tags]
- for xml_item, xml_tag in zip(xml_items, xml_tags):
- data_items = self.root.findall(".//" + xml_item)
- if not data_items:
- continue
- data, low_res_coords = self.read_xml_array(data_items, xml_tag)
+class SAFEXMLCalibration(SAFEXML):
+ """XML file reader for the SAFE format, Calibration file."""
+
+ def __init__(self, filename, filename_info, filetype_info,
+ header_file=None):
+ """Init the XML calibration reader."""
+ super().__init__(filename, filename_info, filetype_info, header_file)
+ self.get_calibration = functools.lru_cache(maxsize=10)(
+ self._get_calibration_uncached
+ )
+
+ def get_dataset(self, key, info, chunks=None):
+ """Load a dataset."""
+ if self._polarization != key["polarization"]:
+ return
+ if key["name"] == "calibration_constant":
+ return self.get_calibration_constant()
+ return self.get_calibration(key["name"], chunks=chunks or CHUNK_SIZE)
+
+ def get_calibration_constant(self):
+ """Load the calibration constant."""
+ return float(self.root.find('.//absoluteCalibrationConstant').text)
+
+ def _get_calibration_uncached(self, calibration, chunks=None):
+ """Get the calibration array."""
+ calibration_name = _get_calibration_name(calibration)
+ calibration_vector = self._get_calibration_vector(calibration_name, chunks)
+ return calibration_vector
+
+ def _get_calibration_vector(self, calibration_name, chunks):
+ """Get the calibration vector."""
+ calibration_vector = XMLArray(self.root, ".//calibrationVector", calibration_name)
+ return calibration_vector.expand(self._image_shape, chunks=chunks)
+
+
+class SAFEXMLNoise(SAFEXML):
+ """XML file reader for the SAFE format, Noise file."""
+
+ def __init__(self, filename, filename_info, filetype_info,
+ header_file=None):
+ """Init the xml filehandler."""
+ super().__init__(filename, filename_info, filetype_info, header_file)
- if key['name'].endswith('squared'):
- data **= 2
+ self.azimuth_noise_reader = AzimuthNoiseReader(self.root, self._image_shape)
+ self.get_noise_correction = functools.lru_cache(maxsize=10)(
+ self._get_noise_correction_uncached
+ )
- data = self.interpolate_xml_array(data, low_res_coords, data.shape)
+ def get_dataset(self, key, info, chunks=None):
+ """Load a dataset."""
+ if self._polarization != key["polarization"]:
+ return
+ if key["name"] == "noise":
+ return self.get_noise_correction(chunks=chunks or CHUNK_SIZE)
- @lru_cache(maxsize=10)
- def get_noise_correction(self, chunks=None):
+ def _get_noise_correction_uncached(self, chunks=None):
"""Get the noise correction array."""
try:
noise = self.read_legacy_noise(chunks)
@@ -158,38 +223,6 @@ def read_range_noise_array(self, chunks):
range_noise = XMLArray(self.root, ".//noiseRangeVector", "noiseRangeLut")
return range_noise.expand(self._image_shape, chunks)
- @lru_cache(maxsize=10)
- def get_calibration(self, calibration, chunks=None):
- """Get the calibration array."""
- calibration_name = _get_calibration_name(calibration)
- calibration_vector = self._get_calibration_vector(calibration_name, chunks)
- return calibration_vector
-
- def _get_calibration_vector(self, calibration_name, chunks):
- """Get the calibration vector."""
- calibration_vector = XMLArray(self.root, ".//calibrationVector", calibration_name)
- return calibration_vector.expand(self._image_shape, chunks=chunks)
-
- def get_calibration_constant(self):
- """Load the calibration constant."""
- return float(self.root.find('.//absoluteCalibrationConstant').text)
-
- @lru_cache(maxsize=10)
- def get_incidence_angle(self, chunks):
- """Get the incidence angle array."""
- incidence_angle = XMLArray(self.root, ".//geolocationGridPoint", "incidenceAngle")
- return incidence_angle.expand(self._image_shape, chunks=chunks)
-
- @property
- def start_time(self):
- """Get the start time."""
- return self._start_time
-
- @property
- def end_time(self):
- """Get the end time."""
- return self._end_time
-
class AzimuthNoiseReader:
"""Class to parse and read azimuth-noise data.
@@ -214,9 +247,9 @@ class AzimuthNoiseReader:
to be gap-filled with NaNs.
"""
- def __init__(self, filename, shape):
+ def __init__(self, root, shape):
"""Set up the azimuth noise reader."""
- self.root = ET.parse(filename)
+ self.root = root
self.elements = self.root.findall(".//noiseAzimuthVector")
self._image_shape = shape
self.blocks = []
@@ -264,17 +297,21 @@ def _create_dask_slices_from_blocks(self, chunks):
def _create_dask_slice_from_block_line(self, current_line, chunks):
"""Create a dask slice from the blocks at the current line."""
- current_blocks = self._find_blocks_covering_line(current_line)
- current_blocks.sort(key=(lambda x: x.coords['x'][0]))
-
- next_line = min((arr.coords['y'][-1] for arr in current_blocks))
- current_y = np.arange(current_line, next_line + 1)
-
- pieces = [arr.sel(y=current_y) for arr in current_blocks]
+ pieces = self._get_array_pieces_for_current_line(current_line)
dask_pieces = self._get_padded_dask_pieces(pieces, chunks)
new_slice = da.hstack(dask_pieces)
+
return new_slice
+ def _get_array_pieces_for_current_line(self, current_line):
+ """Get the array pieces that cover the current line."""
+ current_blocks = self._find_blocks_covering_line(current_line)
+ current_blocks.sort(key=(lambda x: x.coords['x'][0]))
+ next_line = self._get_next_start_line(current_blocks, current_line)
+ current_y = np.arange(current_line, next_line)
+ pieces = [arr.sel(y=current_y) for arr in current_blocks]
+ return pieces
+
def _find_blocks_covering_line(self, current_line):
"""Find the blocks covering a given line."""
current_blocks = []
@@ -283,30 +320,43 @@ def _find_blocks_covering_line(self, current_line):
current_blocks.append(block)
return current_blocks
+ def _get_next_start_line(self, current_blocks, current_line):
+ next_line = min((arr.coords['y'][-1] for arr in current_blocks)) + 1
+ blocks_starting_soon = [block for block in self.blocks if current_line < block.coords["y"][0] < next_line]
+ if blocks_starting_soon:
+ next_start_line = min((arr.coords["y"][0] for arr in blocks_starting_soon))
+ next_line = min(next_line, next_start_line)
+ return next_line
+
def _get_padded_dask_pieces(self, pieces, chunks):
"""Get the padded pieces of a slice."""
- dask_pieces = [piece.data for piece in pieces]
- self._pad_dask_pieces_before(pieces, dask_pieces, chunks)
- self._pad_dask_pieces_after(pieces, dask_pieces, chunks)
+ pieces = sorted(pieces, key=(lambda x: x.coords['x'][0]))
+ dask_pieces = []
+ previous_x_end = -1
+ piece = pieces[0]
+ next_x_start = piece.coords['x'][0].item()
+ y_shape = len(piece.coords['y'])
+
+ x_shape = (next_x_start - previous_x_end - 1)
+ self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks)
+
+ for i, piece in enumerate(pieces):
+ dask_pieces.append(piece.data)
+ previous_x_end = piece.coords['x'][-1].item()
+ try:
+ next_x_start = pieces[i + 1].coords['x'][0].item()
+ except IndexError:
+ next_x_start = self._image_shape[1]
+
+ x_shape = (next_x_start - previous_x_end - 1)
+ self._fill_dask_pieces(dask_pieces, (y_shape, x_shape), chunks)
+
return dask_pieces
@staticmethod
- def _pad_dask_pieces_before(pieces, dask_pieces, chunks):
- """Pad the dask pieces before."""
- first_x = min(arr.coords['x'][0] for arr in pieces)
- if first_x > 0:
- missing_x = np.arange(first_x)
- missing_y = pieces[0].coords['y']
- new_piece = da.full((len(missing_y), len(missing_x)), np.nan, chunks=chunks)
- dask_pieces.insert(0, new_piece)
-
- def _pad_dask_pieces_after(self, pieces, dask_pieces, chunks):
- """Pad the dask pieces after."""
- last_x = max(arr.coords['x'][-1] for arr in pieces)
- if last_x < self._image_shape[1] - 1:
- missing_x = np.arange(last_x + 1, self._image_shape[1])
- missing_y = pieces[-1].coords['y']
- new_piece = da.full((len(missing_y), len(missing_x)), np.nan, chunks=chunks)
+ def _fill_dask_pieces(dask_pieces, shape, chunks):
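+        """Append a NaN-filled piece of the given shape to fill a horizontal gap, if the gap is non-empty."""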
+ if shape[1] > 0:
+ new_piece = da.full(shape, np.nan, chunks=chunks)
dask_pieces.append(new_piece)
@@ -462,8 +512,7 @@ def intp(grid_x, grid_y, interpolator):
def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE):
"""Interpolate linearly, generating a dask array."""
- from scipy.interpolate.interpnd import (LinearNDInterpolator,
- _ndim_coords_from_arrays)
+ from scipy.interpolate.interpnd import LinearNDInterpolator, _ndim_coords_from_arrays
if isinstance(chunks, (list, tuple)):
vchunks, hchunks = chunks
@@ -511,6 +560,9 @@ def __init__(self, filename, filename_info, filetype_info, calfh, noisefh, annot
self.read_lock = Lock()
self.filehandle = rasterio.open(self.filename, 'r', sharing=False)
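+        # Cache per instance so the cached results live and die with this file
+        # handler rather than in a process-wide method cache.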
+ self.get_lonlatalts = functools.lru_cache(maxsize=2)(
+ self._get_lonlatalts_uncached
+ )
def get_dataset(self, key, info):
"""Load a dataset."""
@@ -583,8 +635,7 @@ def _calibrate(self, dn, chunks, key):
data = ((dn + cal_constant) / (cal ** 2)).clip(min=0)
return data
- @lru_cache(maxsize=2)
- def get_lonlatalts(self):
+ def _get_lonlatalts_uncached(self):
"""Obtain GCPs and construct latitude and longitude arrays.
Args:
diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py
index 23ea136bc2..202e81f0ac 100644
--- a/satpy/readers/satpy_cf_nc.py
+++ b/satpy/readers/satpy_cf_nc.py
@@ -179,10 +179,11 @@
"""
import itertools
-import logging
import json
+import logging
import xarray as xr
+from pyresample import AreaDefinition
from satpy import CHUNK_SIZE
from satpy.dataset.dataid import WavelengthRange
@@ -210,16 +211,16 @@ def end_time(self):
"""Get end time."""
return self.filename_info.get('end_time', self.start_time)
- @property
- def sensor(self):
- """Get sensor."""
- nc = xr.open_dataset(self.filename, engine=self.engine)
- return nc.attrs['instrument'].replace('/', '-').lower()
-
@property
def sensor_names(self):
"""Get sensor set."""
- return {self.sensor}
+ sensors = set()
+ for _, ds_info in self.available_datasets():
+ try:
+ sensors.add(ds_info["sensor"])
+ except KeyError:
+ continue
+ return sensors
def available_datasets(self, configured_datasets=None):
"""Add information of available datasets."""
@@ -294,8 +295,21 @@ def get_dataset(self, ds_id, ds_info):
data.attrs.update(nc.attrs) # For now add global attributes to all datasets
if "orbital_parameters" in data.attrs:
data.attrs["orbital_parameters"] = _str2dict(data.attrs["orbital_parameters"])
+
return data
+ def get_area_def(self, dataset_id):
+ """Get area definition from CF complient netcdf."""
+ try:
+ area = AreaDefinition.from_cf(self.filename)
+ return area
+ except ValueError:
+ # No CF compliant projection information was found in the netcdf file or
+ # file contains 2D lat/lon arrays. To fall back to generating a SwathDefinition
+ # with the yaml_reader NotImplementedError is raised.
+ logger.debug("No AreaDefinition to load from nc file. Falling back to SwathDefinition.")
+ raise NotImplementedError
+
def _str2dict(val):
"""Convert string to dictionary."""
diff --git a/satpy/readers/scatsat1_l2b.py b/satpy/readers/scatsat1_l2b.py
index 6765a1dc10..e507cdb2bc 100644
--- a/satpy/readers/scatsat1_l2b.py
+++ b/satpy/readers/scatsat1_l2b.py
@@ -14,10 +14,11 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format
-"""
+# type: ignore
+"""ScatSat-1 L2B Reader, distributed by Eumetsat in HDF5 format."""
from datetime import datetime
+
import h5py
from satpy.dataset import Dataset
diff --git a/satpy/readers/scmi.py b/satpy/readers/scmi.py
index 042f7e5246..c8cdb65cf5 100644
--- a/satpy/readers/scmi.py
+++ b/satpy/readers/scmi.py
@@ -41,15 +41,15 @@
"""
import logging
+import os
from datetime import datetime
-import os
import numpy as np
import xarray as xr
-
from pyresample import geometry
-from satpy.readers.file_handlers import BaseFileHandler
+
from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
# NetCDF doesn't support multi-threaded reading, trick it by opening
# as one whole chunk then split it up before we do any calculations
@@ -157,9 +157,11 @@ def get_dataset(self, key, info):
'sensor': data.attrs.get('sensor', self.sensor),
})
if 'satellite_longitude' in self.nc.attrs:
- data.attrs['satellite_longitude'] = self.nc.attrs['satellite_longitude']
- data.attrs['satellite_latitude'] = self.nc.attrs['satellite_latitude']
- data.attrs['satellite_altitude'] = self.nc.attrs['satellite_altitude']
+ data.attrs['orbital_parameters'] = {
+ 'projection_longitude': self.nc.attrs['satellite_longitude'],
+ 'projection_latitude': self.nc.attrs['satellite_latitude'],
+ 'projection_altitude': self.nc.attrs['satellite_altitude'],
+ }
scene_id = self.nc.attrs.get('scene_id')
if scene_id is not None:
diff --git a/satpy/readers/seadas_l2.py b/satpy/readers/seadas_l2.py
new file mode 100644
index 0000000000..281a0132af
--- /dev/null
+++ b/satpy/readers/seadas_l2.py
@@ -0,0 +1,157 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Reader for SEADAS L2 products.
+
+This reader currently only supports MODIS and VIIRS Chlorophyll A from SEADAS.
+
+The reader includes an additional keyword argument ``apply_quality_flags``
+which can be used to mask out low-quality pixels based on quality flags
+contained in the file (``l2_flags``). This option defaults to ``False``, but
+when set to ``True`` the "CHLWARN" pixels of the ``l2_flags`` variable
+are masked out. These pixels represent data where the chlorophyll algorithm
+warned about the quality of the result.
+
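+A minimal usage sketch (the file name is hypothetical, and we assume the reader
+is registered as ``seadas_l2``):
+
+.. code-block:: python
+
+    from satpy import Scene
+
+    scn = Scene(filenames=['SEADAS_l2_chlor_a_example.hdf'],
+                reader='seadas_l2',
+                reader_kwargs={'apply_quality_flags': True})
+    scn.load(['chlor_a'])
+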
+"""
+
+from datetime import datetime
+
+from .hdf4_utils import HDF4FileHandler
+from .netcdf_utils import NetCDF4FileHandler
+
+
+class _SEADASL2Base:
+ """Simple handler of SEADAS L2 files."""
+
+ def __init__(self, filename, filename_info, filetype_info, apply_quality_flags=False):
+ """Initialize file handler and determine if data quality flags should be applied."""
+ super().__init__(filename, filename_info, filetype_info)
+ self.apply_quality_flags = apply_quality_flags and self.l2_flags_var_name in self
+
+ def _add_satpy_metadata(self, data):
+ data.attrs["sensor"] = self.sensor_names
+ data.attrs["platform_name"] = self._platform_name()
+ data.attrs["rows_per_scan"] = self._rows_per_scan()
+ return data
+
+ def _rows_per_scan(self):
+ if "modis" in self.sensor_names:
+ return 10
+ if "viirs" in self.sensor_names:
+ return 16
+ raise ValueError(f"Don't know how to read data for sensors: {self.sensor_names}")
+
+ def _platform_name(self):
+ platform = self[self.platform_attr_name]
+ platform_dict = {'NPP': 'Suomi-NPP',
+ 'JPSS-1': 'NOAA-20',
+ 'JPSS-2': 'NOAA-21'}
+ return platform_dict.get(platform, platform)
+
+ @property
+ def start_time(self):
+ """Get the starting observation time of this file's data."""
+ start_time = self[self.start_time_attr_name]
+ return datetime.strptime(start_time[:-3], self.time_format)
+
+ @property
+ def end_time(self):
+ """Get the ending observation time of this file's data."""
+ end_time = self[self.end_time_attr_name]
+ return datetime.strptime(end_time[:-3], self.time_format)
+
+ @property
+ def sensor_names(self):
+ """Get sensor for the current file's data."""
+ # Example: MODISA or VIIRSN or VIIRSJ1
+ sensor_name = self[self.sensor_attr_name].lower()
+ if sensor_name.startswith("modis"):
+ return {"modis"}
+ return {"viirs"}
+
+ def get_dataset(self, data_id, dataset_info):
+ """Get DataArray for the specified DataID."""
+ file_key, data = self._get_file_key_and_variable(data_id, dataset_info)
+ data = self._filter_by_valid_min_max(data)
+ data = self._rename_2d_dims_if_necessary(data)
+ data = self._mask_based_on_l2_flags(data)
+ for attr_name in ("standard_name", "long_name", "units"):
+ val = data.attrs[attr_name]
+ if val[-1] == "\x00":
+ data.attrs[attr_name] = data.attrs[attr_name][:-1]
+ data = self._add_satpy_metadata(data)
+ return data
+
+ def _get_file_key_and_variable(self, data_id, dataset_info):
+ file_keys = dataset_info.get("file_key", data_id["name"])
+ if not isinstance(file_keys, list):
+ file_keys = [file_keys]
+ for file_key in file_keys:
+ try:
+ data = self[file_key]
+ return file_key, data
+ except KeyError:
+ continue
+ raise KeyError(f"Unable to find any of the possible keys for {data_id}: {file_keys}")
+
+ def _rename_2d_dims_if_necessary(self, data_arr):
+ if data_arr.ndim != 2 or data_arr.dims == ("y", "x"):
+ return data_arr
+ return data_arr.rename(dict(zip(data_arr.dims, ("y", "x"))))
+
+ def _filter_by_valid_min_max(self, data_arr):
+ valid_range = self._valid_min_max(data_arr)
+ data_arr = data_arr.where(valid_range[0] <= data_arr)
+ data_arr = data_arr.where(data_arr <= valid_range[1])
+ return data_arr
+
+ def _valid_min_max(self, data_arr):
+ try:
+ return data_arr.attrs["valid_range"]
+ except KeyError:
+ return data_arr.attrs["valid_min"], data_arr.attrs["valid_max"]
+
+ def _mask_based_on_l2_flags(self, data_arr):
+ standard_name = data_arr.attrs.get("standard_name", "")
+ if self.apply_quality_flags and not ("lon" in standard_name or "lat" in standard_name):
+ l2_flags = self[self.l2_flags_var_name]
+ l2_flags = self._rename_2d_dims_if_necessary(l2_flags)
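+            # Mask pixels where the chlorophyll-warning ("CHLWARN") bit of
+            # l2_flags is set, as described in the module docstring.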
+ mask = (l2_flags & 0b00000000010000000000000000000000) != 0
+ data_arr = data_arr.where(~mask)
+ return data_arr
+
+
+class SEADASL2NetCDFFileHandler(_SEADASL2Base, NetCDF4FileHandler):
+ """Simple handler of SEADAS L2 NetCDF4 files."""
+
+ start_time_attr_name = "/attr/time_coverage_start"
+ end_time_attr_name = "/attr/time_coverage_end"
+ time_format = "%Y-%m-%dT%H:%M:%S.%f"
+ platform_attr_name = "/attr/platform"
+ sensor_attr_name = "/attr/instrument"
+ l2_flags_var_name = "geophysical_data/l2_flags"
+
+
+class SEADASL2HDFFileHandler(_SEADASL2Base, HDF4FileHandler):
+ """Simple handler of SEADAS L2 HDF4 files."""
+
+ start_time_attr_name = "/attr/Start Time"
+ end_time_attr_name = "/attr/End Time"
+ time_format = "%Y%j%H%M%S"
+ platform_attr_name = "/attr/Mission"
+ sensor_attr_name = "/attr/Sensor Name"
+ l2_flags_var_name = "l2_flags"
diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py
index 69c2b4065b..5842c07fa2 100644
--- a/satpy/readers/seviri_base.py
+++ b/satpy/readers/seviri_base.py
@@ -114,16 +114,6 @@
* The ``orbital_parameters`` attribute provides the nominal and actual satellite
position, as well as the projection centre. See the `Metadata` section in
the :doc:`../readers` chapter for more information.
-* The ``raw_metadata`` attribute provides raw metadata from the file header
- (HRIT and Native format). By default, arrays with more than 100 elements are
- excluded to limit memory usage. This threshold can be adjusted using the
- ``mda_max_array_size`` reader keyword argument:
-
- .. code-block:: python
-
- scene = satpy.Scene(filenames,
- reader='seviri_l1b_hrit/native',
- reader_kwargs={'mda_max_array_size': 1000})
* The ``acq_time`` coordinate provides the mean acquisition time for each
scanline. Use a ``MultiIndex`` to enable selection by acquisition time:
@@ -136,6 +126,20 @@
scn['IR_108']['y'] = mi
scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000'))
+* Raw metadata from the file header can be included by setting the reader
+ argument ``include_raw_metadata=True`` (HRIT and Native format only). Note
+ that this comes with a performance penalty of up to 10% if raw metadata from
+ multiple segments or scans need to be combined. By default arrays with more
+ than 100 elements are excluded to limit the performance penalty. This
+ threshold can be adjusted using the ``mda_max_array_size`` reader keyword
+ argument:
+
+ .. code-block:: python
+
+ scene = satpy.Scene(filenames,
+ reader='seviri_l1b_hrit/native',
+ reader_kwargs={'include_raw_metadata': True,
+ 'mda_max_array_size': 1000})
References:
- `MSG Level 1.5 Image Data Format Description`_
@@ -154,15 +158,14 @@
import warnings
-import numpy as np
-from numpy.polynomial.chebyshev import Chebyshev
import dask.array as da
+import numpy as np
import pyproj
+from numpy.polynomial.chebyshev import Chebyshev
-from satpy.readers.utils import apply_earthsun_distance_correction
-from satpy.readers.eum_base import (time_cds_short,
- issue_revision)
from satpy import CHUNK_SIZE
+from satpy.readers.eum_base import issue_revision, time_cds_short
+from satpy.readers.utils import apply_earthsun_distance_correction
PLATFORM_DICT = {
'MET08': 'Meteosat-8',
@@ -411,8 +414,7 @@ def dec10216(inbuf):
arr16_1 = ((arr10_1 & 63) << 4) + (arr10_2 >> 4)
arr16_2 = ((arr10_2 & 15) << 6) + (arr10_3 >> 2)
arr16_3 = ((arr10_3 & 3) << 8) + arr10_4
- arr16 = da.stack([arr16_0, arr16_1, arr16_2, arr16_3], axis=-1).ravel()
- arr16 = da.rechunk(arr16, arr16.shape[0])
+ arr16 = np.stack([arr16_0, arr16_1, arr16_2, arr16_3], axis=-1).ravel()
return arr16
@@ -446,7 +448,12 @@ def get(self):
('TerminationSpace', 'S1'),
('EncodingVersion', np.uint16),
('Channel', np.uint8),
- ('Filler', 'S20'),
+ ('ImageLocation', 'S3'),
+        ('GsicsCalMode', np.bool_),
+        ('GsicsCalValidity', np.bool_),
+ ('Padding', 'S2'),
+ ('OffsetToData', np.uint32),
+ ('Padding2', 'S9'),
('RepeatCycle', 'S15'),
]
@@ -523,7 +530,7 @@ def vis_calibrate(self, data, solar_irradiance):
"""Calibrate to reflectance.
This uses the method described in Conversion from radiances to
- reflectances for SEVIRI warm channels: https://tinyurl.com/y67zhphm
+ reflectances for SEVIRI warm channels: https://www-cdn.eumetsat.int/files/2020-04/pdf_msg_seviri_rad2refl.pdf
"""
reflectance = np.pi * data * 100.0 / solar_irradiance
return apply_earthsun_distance_correction(reflectance, self._scan_time)
@@ -823,39 +830,44 @@ def _get_closest_interval(self, time):
return closest_match, distance
def calculate_area_extent(area_dict):
"""Calculate the area extent seen by a geostationary satellite.
Args:
area_dict: A dictionary containing the required parameters
center_point: Center point for the projection
- resolution: Pixel resulution in meters
north: Northmost row number
east: Eastmost column number
west: Westmost column number
south: Southmost row number
+            column_step: Pixel resolution in meters in east-west direction
+            line_step: Pixel resolution in meters in south-north direction
[column_offset: Column offset, defaults to 0 if not given]
- [row_offset: Row offset, defaults to 0 if not given]
+ [line_offset: Line offset, defaults to 0 if not given]
Returns:
tuple: An area extent for the scene defined by the lower left and
upper right corners
+    Note:
+        For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + column_offset) must be -1856.5.
+        See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels.
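+
+    A sketch with illustrative full-disk VISIR values (real values come from the
+    file or GRIB header):
+
+    .. code-block:: python
+
+        area_dict = {'center_point': 1856, 'north': 3712, 'east': 1,
+                     'south': 1, 'west': 3712,
+                     'column_step': 3000.4, 'line_step': 3000.4}
+        ll_c, ll_l, ur_c, ur_l = calculate_area_extent(area_dict)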
"""
- # For Earth model 2 and full disk resolution center point
- # column and row is (1856.5, 1856.5)
- # See: MSG Level 1.5 Image Data Format Description, Figure 7
- cp_c = area_dict['center_point'] + area_dict.get('column_offset', 0)
- cp_r = area_dict['center_point'] + area_dict.get('row_offset', 0)
-
- # Calculate column and row for lower left and upper right corners.
- ll_c = (area_dict['west'] - cp_c)
- ll_r = (area_dict['north'] - cp_r + 1)
- ur_c = (area_dict['east'] - cp_c - 1)
- ur_r = (area_dict['south'] - cp_r)
-
- aex = np.array([ll_c, ll_r, ur_c, ur_r]) * area_dict['resolution']
-
- return tuple(aex)
+ center_point = area_dict['center_point']
+ east = area_dict['east']
+ west = area_dict['west']
+ south = area_dict['south']
+ north = area_dict['north']
+ column_step = area_dict['column_step']
+ line_step = area_dict['line_step']
+ column_offset = area_dict.get('column_offset', 0)
+ line_offset = area_dict.get('line_offset', 0)
+
+ ll_c = (center_point - east + 0.5 + column_offset) * column_step
+ ll_l = (north - center_point + 0.5 + line_offset) * line_step
+ ur_c = (center_point - west - 0.5 + column_offset) * column_step
+ ur_l = (south - center_point - 0.5 + line_offset) * line_step
+
+ return (ll_c, ll_l, ur_c, ur_l)
def create_coef_dict(coefs_nominal, coefs_gsics, radiance_type, ext_coefs):
diff --git a/satpy/readers/seviri_l1b_hrit.py b/satpy/readers/seviri_l1b_hrit.py
index 96b282abb1..7da6ee1e21 100644
--- a/satpy/readers/seviri_l1b_hrit.py
+++ b/satpy/readers/seviri_l1b_hrit.py
@@ -38,12 +38,12 @@
H-000-MSG4__-MSG4________-_________-EPI______-201903011200-__
Each image is decomposed into 24 segments (files) for the high-resolution-visible (HRV) channel and 8 segments for other
-visible (VIS) and infrared (IR) channels. Additionally there is one prologue and one epilogue file for the entire scan
+visible (VIS) and infrared (IR) channels. Additionally, there is one prologue and one epilogue file for the entire scan
which contain global metadata valid for all channels.
Reader Arguments
----------------
-Some arguments can be provided to the reader to change it's behaviour. These are
+Some arguments can be provided to the reader to change its behaviour. These are
provided through the `Scene` instantiation, eg::
Scene(reader="seviri_l1b_hrit", filenames=fnames, reader_kwargs={'fill_hrv': False})
@@ -51,6 +51,16 @@
To see the full list of arguments that can be provided, look into the documentation
of :class:`HRITMSGFileHandler`.
+Compression
+-----------
+
+This reader accepts compressed HRIT files, ending in ``C_``, like other HRIT readers; see
+:class:`satpy.readers.hrit_base.HRITFileHandler`.
+
+This reader also accepts bzipped files with the extension ``.bz2`` for the prologue,
+epilogue, and segment files, as shown in the sketch below.
+
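+A short sketch of passing bzipped segments directly (file names are illustrative):
+
+.. code-block:: python
+
+    import glob
+
+    from satpy import Scene
+
+    filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*.bz2')
+    scn = Scene(filenames=filenames, reader='seviri_l1b_hrit')
+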
+
Example
-------
Here is an example how to read the data in satpy:
@@ -77,9 +87,6 @@
* x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06
* y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06
Attributes:
- satellite_longitude: 0.0
- satellite_latitude: 0.0
- satellite_altitude: 35785831.0
orbital_parameters: {'projection_longitude': 0.0, 'projection_latit...
platform_name: Meteosat-11
georef_offset_corrected: True
@@ -100,9 +107,80 @@
modifiers: ()
ancillary_variables: []
+The `filenames` argument can either be a list of strings, see the example above, or a list of
+:class:`satpy.readers.FSFile` objects. FSFiles can be used in conjunction with `fsspec`_,
+e.g. to handle in-memory data:
+
+.. code-block:: python
+
+ import glob
+
+ from fsspec.implementations.memory import MemoryFile, MemoryFileSystem
+ from satpy import Scene
+ from satpy.readers import FSFile
+
+ # In this example, we will make use of `MemoryFile`s in a `MemoryFileSystem`.
+ memory_fs = MemoryFileSystem()
+
+ # Usually, the data already resides in memory.
+ # For explanatory reasons, we will load the files found with glob in memory,
+ # and load the scene with FSFiles.
+ filenames = glob.glob('data/H-000-MSG4__-MSG4________-*201903011200*')
+ fs_files = []
+ for fn in filenames:
+ with open(fn, 'rb') as fh:
+ fs_files.append(MemoryFile(
+ fs=memory_fs,
+ path="{}{}".format(memory_fs.root_marker, fn),
+ data=fh.read()
+ ))
+ fs_files[-1].commit() # commit the file to the filesystem
+    fs_files = [FSFile(mem_file) for mem_file in fs_files]  # wrap the MemoryFiles as FSFiles
+ # similar to the example above, we pass a list of FSFiles to the `Scene`
+ scn = Scene(filenames=fs_files, reader='seviri_l1b_hrit')
+ scn.load(['VIS006', 'IR_108'])
+ print(scn['IR_108'])
+
+
+Output:
+
+.. code-block:: none
+
+    <xarray.DataArray (y: 3712, x: 3712)>
+    dask.array<shape=(3712, 3712), dtype=float32, chunksize=(464, 3712)>
+ Coordinates:
+ acq_time (y) datetime64[ns] NaT NaT NaT NaT NaT NaT ... NaT NaT NaT NaT NaT
+ * x (x) float64 5.566e+06 5.563e+06 5.56e+06 ... -5.566e+06 -5.569e+06
+ * y (y) float64 -5.566e+06 -5.563e+06 ... 5.566e+06 5.569e+06
+ Attributes:
+ orbital_parameters: {'projection_longitude': 0.0, 'projection_latit...
+ platform_name: Meteosat-11
+ georef_offset_corrected: True
+ standard_name: brightness_temperature
+ raw_metadata: {'file_type': 0, 'total_header_length': 6198, '...
+ wavelength: (9.8, 10.8, 11.8)
+ units: K
+ sensor: seviri
+ platform_name: Meteosat-11
+ start_time: 2019-03-01 12:00:09.716000
+ end_time: 2019-03-01 12:12:42.946000
+ area: Area ID: some_area_name\\nDescription: On-the-fl...
+ name: IR_108
+ resolution: 3000.403165817
+ calibration: brightness_temperature
+ polarization: None
+ level: None
+ modifiers: ()
+ ancillary_variables: []
+
+
+References:
+ - `MSG Level 1.5 Image Data Format Description`_
+
.. _MSG Level 1.5 Image Data Format Description:
https://www-cdn.eumetsat.int/files/2020-05/pdf_ten_05105_msg_img_data.pdf
-
+.. _fsspec:
+ https://filesystem-spec.readthedocs.io
"""
from __future__ import division
@@ -114,24 +192,34 @@
import dask.array as da
import numpy as np
import xarray as xr
-
from pyresample import geometry
+
+import satpy.readers.utils as utils
from satpy import CHUNK_SIZE
from satpy._compat import cached_property
-import satpy.readers.utils as utils
-from satpy.readers.eum_base import recarray2dict, time_cds_short, get_service_mode
-from satpy.readers.hrit_base import (HRITFileHandler, ancillary_text,
- annotation_header, base_hdr_map,
- image_data_function)
+from satpy.readers._geos_area import get_area_definition, get_area_extent, get_geos_area_naming
+from satpy.readers.eum_base import get_service_mode, recarray2dict, time_cds_short
+from satpy.readers.hrit_base import (
+ HRITFileHandler,
+ ancillary_text,
+ annotation_header,
+ base_hdr_map,
+ image_data_function,
+)
from satpy.readers.seviri_base import (
- CHANNEL_NAMES, SATNUM, SEVIRICalibrationHandler, get_cds_time,
- HRV_NUM_COLUMNS, pad_data_horizontally, create_coef_dict,
- OrbitPolynomialFinder, get_satpos, NoValidOrbitParams,
- add_scanline_acq_time
+ CHANNEL_NAMES,
+ HRV_NUM_COLUMNS,
+ SATNUM,
+ NoValidOrbitParams,
+ OrbitPolynomialFinder,
+ SEVIRICalibrationHandler,
+ add_scanline_acq_time,
+ create_coef_dict,
+ get_cds_time,
+ get_satpos,
+ pad_data_horizontally,
)
-from satpy.readers.seviri_l1b_native_hdr import (hrit_epilogue, hrit_prologue,
- impf_configuration)
-from satpy.readers._geos_area import get_area_extent, get_area_definition, get_geos_area_naming
+from satpy.readers.seviri_l1b_native_hdr import hrit_epilogue, hrit_prologue, impf_configuration
logger = logging.getLogger('hrit_msg')
@@ -210,7 +298,8 @@ class HRITMSGPrologueFileHandler(HRITMSGPrologueEpilogueBase):
"""SEVIRI HRIT prologue reader."""
def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal',
- ext_calib_coefs=None, mda_max_array_size=None, fill_hrv=None):
+ ext_calib_coefs=None, include_raw_metadata=False,
+ mda_max_array_size=None, fill_hrv=None):
"""Initialize the reader."""
super(HRITMSGPrologueFileHandler, self).__init__(filename, filename_info,
filetype_info,
@@ -228,13 +317,13 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal',
def read_prologue(self):
"""Read the prologue metadata."""
- with open(self.filename, "rb") as fp_:
+ with utils.generic_open(self.filename, mode="rb") as fp_:
fp_.seek(self.mda['total_header_length'])
- data = np.fromfile(fp_, dtype=hrit_prologue, count=1)
+ data = np.frombuffer(fp_.read(hrit_prologue.itemsize), dtype=hrit_prologue, count=1)
self.prologue.update(recarray2dict(data))
try:
- impf = np.fromfile(fp_, dtype=impf_configuration, count=1)[0]
- except IndexError:
+ impf = np.frombuffer(fp_.read(impf_configuration.itemsize), dtype=impf_configuration, count=1)[0]
+ except ValueError:
logger.info('No IMPF configuration field found in prologue.')
else:
self.prologue.update(recarray2dict(impf))
@@ -282,7 +371,8 @@ class HRITMSGEpilogueFileHandler(HRITMSGPrologueEpilogueBase):
"""SEVIRI HRIT epilogue reader."""
def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal',
- ext_calib_coefs=None, mda_max_array_size=None, fill_hrv=None):
+ ext_calib_coefs=None, include_raw_metadata=False,
+ mda_max_array_size=None, fill_hrv=None):
"""Initialize the reader."""
super(HRITMSGEpilogueFileHandler, self).__init__(filename, filename_info,
filetype_info,
@@ -300,9 +390,9 @@ def __init__(self, filename, filename_info, filetype_info, calib_mode='nominal',
def read_epilogue(self):
"""Read the epilogue metadata."""
- with open(self.filename, "rb") as fp_:
+ with utils.generic_open(self.filename, mode="rb") as fp_:
fp_.seek(self.mda['total_header_length'])
- data = np.fromfile(fp_, dtype=hrit_epilogue, count=1)
+ data = np.frombuffer(fp_.read(hrit_epilogue.itemsize), dtype=hrit_epilogue, count=1)
self.epilogue.update(recarray2dict(data))
def reduce(self, max_size):
@@ -336,7 +426,8 @@ class HRITMSGFileHandler(HRITFileHandler):
def __init__(self, filename, filename_info, filetype_info,
prologue, epilogue, calib_mode='nominal',
- ext_calib_coefs=None, mda_max_array_size=100, fill_hrv=True):
+ ext_calib_coefs=None, include_raw_metadata=False,
+ mda_max_array_size=100, fill_hrv=True):
"""Initialize the reader."""
super(HRITMSGFileHandler, self).__init__(filename, filename_info,
filetype_info,
@@ -349,6 +440,7 @@ def __init__(self, filename, filename_info, filetype_info,
self.prologue = prologue.prologue
self.epilogue = epilogue.epilogue
self._filename_info = filename_info
+ self.include_raw_metadata = include_raw_metadata
self.mda_max_array_size = mda_max_array_size
self.fill_hrv = fill_hrv
self.calib_mode = calib_mode
@@ -394,6 +486,18 @@ def _get_header(self):
self.mda['service'] = service
self.channel_name = CHANNEL_NAMES[self.mda['spectral_channel_id']]
+ @property
+ def nominal_start_time(self):
+ """Get the start time."""
+ return self.prologue['ImageAcquisition'][
+ 'PlannedAcquisitionTime']['TrueRepeatCycleStart']
+
+ @property
+ def nominal_end_time(self):
+ """Get the end time."""
+ return self.prologue['ImageAcquisition'][
+ 'PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
+
@property
def start_time(self):
"""Get the start time."""
@@ -614,18 +718,16 @@ def _update_attrs(self, res, info):
res.attrs['standard_name'] = info['standard_name']
res.attrs['platform_name'] = self.platform_name
res.attrs['sensor'] = 'seviri'
- res.attrs['satellite_longitude'] = self.mda[
- 'projection_parameters']['SSP_longitude']
- res.attrs['satellite_latitude'] = self.mda[
- 'projection_parameters']['SSP_latitude']
- res.attrs['satellite_altitude'] = self.mda['projection_parameters']['h']
+ res.attrs['nominal_start_time'] = self.nominal_start_time
+ res.attrs['nominal_end_time'] = self.nominal_end_time
res.attrs['orbital_parameters'] = {
'projection_longitude': self.mda['projection_parameters']['SSP_longitude'],
'projection_latitude': self.mda['projection_parameters']['SSP_latitude'],
'projection_altitude': self.mda['projection_parameters']['h']}
res.attrs['orbital_parameters'].update(self.mda['orbital_parameters'])
res.attrs['georef_offset_corrected'] = self.mda['offset_corrected']
- res.attrs['raw_metadata'] = self._get_raw_mda()
+ if self.include_raw_metadata:
+ res.attrs['raw_metadata'] = self._get_raw_mda()
def _get_calib_coefs(self, channel_name):
"""Get coefficients for calibration from counts to radiance."""
diff --git a/satpy/readers/seviri_l1b_icare.py b/satpy/readers/seviri_l1b_icare.py
index 748f18e273..38c4360744 100644
--- a/satpy/readers/seviri_l1b_icare.py
+++ b/satpy/readers/seviri_l1b_icare.py
@@ -69,11 +69,13 @@
ancillary_variables: []
"""
-from satpy.readers._geos_area import get_area_extent, get_area_definition
-from satpy.readers.hdf4_utils import HDF4FileHandler
from datetime import datetime
+
import numpy as np
+from satpy.readers._geos_area import get_area_definition, get_area_extent
+from satpy.readers.hdf4_utils import HDF4FileHandler
+
class SEVIRI_ICARE(HDF4FileHandler):
"""SEVIRI L1B handler for HDF4 files."""
@@ -252,13 +254,13 @@ def get_dataset(self, ds_id, ds_info):
offset = data.attrs.get('add_offset')
scale_factor = data.attrs.get('scale_factor')
data = data.where(data != fill)
- data.values = data.values.astype(np.float32)
+ data = data.astype(np.float32)
if scale_factor is not None and offset is not None:
- data.values *= scale_factor
- data.values += offset
+ data = data * scale_factor
+ data = data + offset
# Now we correct range from 0-1 to 0-100 for VIS:
if ds_id['name'] in self.ref_bands:
- data.values *= 100.
+ data = data * 100.
return data
def get_area_def(self, ds_id):
diff --git a/satpy/readers/seviri_l1b_native.py b/satpy/readers/seviri_l1b_native.py
index e626b73601..9c3268d6d6 100644
--- a/satpy/readers/seviri_l1b_native.py
+++ b/satpy/readers/seviri_l1b_native.py
@@ -26,34 +26,45 @@
"""
import logging
+import warnings
from datetime import datetime
-import numpy as np
-import xarray as xr
import dask.array as da
-
-from satpy import CHUNK_SIZE
-
+import numpy as np
+import xarray as xr
from pyresample import geometry
+from satpy import CHUNK_SIZE
from satpy._compat import cached_property
+from satpy.readers._geos_area import get_area_definition, get_geos_area_naming
+from satpy.readers.eum_base import get_service_mode, recarray2dict, time_cds_short
from satpy.readers.file_handlers import BaseFileHandler
-from satpy.readers.eum_base import (
- recarray2dict, get_service_mode, time_cds_short
-)
from satpy.readers.seviri_base import (
- SEVIRICalibrationHandler, CHANNEL_NAMES, SATNUM, dec10216,
- VISIR_NUM_COLUMNS, VISIR_NUM_LINES, HRV_NUM_COLUMNS, HRV_NUM_LINES,
- create_coef_dict, pad_data_horizontally, pad_data_vertically,
- add_scanline_acq_time, get_cds_time, OrbitPolynomialFinder, get_satpos,
- NoValidOrbitParams
+ CHANNEL_NAMES,
+ HRV_NUM_COLUMNS,
+ HRV_NUM_LINES,
+ SATNUM,
+ VISIR_NUM_COLUMNS,
+ VISIR_NUM_LINES,
+ NoValidOrbitParams,
+ OrbitPolynomialFinder,
+ SEVIRICalibrationHandler,
+ add_scanline_acq_time,
+ calculate_area_extent,
+ create_coef_dict,
+ dec10216,
+ get_cds_time,
+ get_satpos,
+ pad_data_horizontally,
+ pad_data_vertically,
)
from satpy.readers.seviri_l1b_native_hdr import (
- GSDTRecords, get_native_header, native_trailer,
- DEFAULT_15_SECONDARY_PRODUCT_HEADER
+ DEFAULT_15_SECONDARY_PRODUCT_HEADER,
+ GSDTRecords,
+ get_native_header,
+ native_trailer,
)
from satpy.readers.utils import reduce_mda
-from satpy.readers._geos_area import get_area_definition, get_geos_area_naming
logger = logging.getLogger('native_msg')
@@ -84,7 +95,7 @@ class NativeMSGFileHandler(BaseFileHandler):
def __init__(self, filename, filename_info, filetype_info,
calib_mode='nominal', fill_disk=False, ext_calib_coefs=None,
- mda_max_array_size=100):
+ include_raw_metadata=False, mda_max_array_size=100):
"""Initialize the reader."""
super(NativeMSGFileHandler, self).__init__(filename,
filename_info,
@@ -93,6 +104,7 @@ def __init__(self, filename, filename_info, filetype_info,
self.calib_mode = calib_mode
self.ext_calib_coefs = ext_calib_coefs or {}
self.fill_disk = fill_disk
+ self.include_raw_metadata = include_raw_metadata
self.mda_max_array_size = mda_max_array_size
# Declare required variables.
@@ -115,29 +127,38 @@ def _has_archive_header(self):
return istream.read(36) == ascii_startswith
@property
- def start_time(self):
- """Read the repeat cycle start time from metadata."""
+ def nominal_start_time(self):
+ """Read the repeat cycle nominal start time from metadata."""
return self.header['15_DATA_HEADER']['ImageAcquisition'][
'PlannedAcquisitionTime']['TrueRepeatCycleStart']
@property
- def end_time(self):
- """Read the repeat cycle end time from metadata."""
+ def nominal_end_time(self):
+ """Read the repeat cycle nominal end time from metadata."""
return self.header['15_DATA_HEADER']['ImageAcquisition'][
'PlannedAcquisitionTime']['PlannedRepeatCycleEnd']
- @staticmethod
- def _calculate_area_extent(center_point, north, east, south, west,
- we_offset, ns_offset, column_step, line_step):
- # For Earth model 2 and full disk VISIR, (center_point - west - 0.5 + we_offset) must be -1856.5 .
- # See MSG Level 1.5 Image Data Format Description Figure 7 - Alignment and numbering of the non-HRV pixels.
+ @property
+ def observation_start_time(self):
+ """Read the repeat cycle sensing start time from metadata."""
+ return self.trailer['15TRAILER']['ImageProductionStats'][
+ 'ActualScanningSummary']['ForwardScanStart']
- ll_c = (center_point - east + 0.5 + we_offset) * column_step
- ll_l = (north - center_point + 0.5 + ns_offset) * line_step
- ur_c = (center_point - west - 0.5 + we_offset) * column_step
- ur_l = (south - center_point - 0.5 + ns_offset) * line_step
+ @property
+ def observation_end_time(self):
+ """Read the repeat cycle sensing end time from metadata."""
+ return self.trailer['15TRAILER']['ImageProductionStats'][
+ 'ActualScanningSummary']['ForwardScanEnd']
- return (ll_c, ll_l, ur_c, ur_l)
+ @property
+ def start_time(self):
+ """Get general start time for this file."""
+ return self.nominal_start_time
+
+ @property
+ def end_time(self):
+ """Get the general end time for this file."""
+ return self.nominal_end_time
def _get_data_dtype(self):
"""Get the dtype of the file based on the actual available channels."""
@@ -275,6 +296,9 @@ def _read_header(self):
self.mda['hrv_number_of_lines'] = int(sec15hd["NumberLinesHRV"]['Value'])
self.mda['hrv_number_of_columns'] = cols_hrv
+ if self.header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value'] == 'NOK':
+ warnings.warn("The quality flag for this file indicates not OK. Use this data with caution!", UserWarning)
+
def _read_trailer(self):
hdr_size = self.header_type.itemsize
@@ -411,8 +435,19 @@ def get_area_extent(self, dataset_id):
nlines = north_bound - south_bound + 1
ncolumns = west_bound - east_bound + 1
- aex = self._calculate_area_extent(center_point, north_bound, east_bound, south_bound, west_bound,
- we_offset, ns_offset, column_step, line_step)
+
+ area_dict = {'center_point': center_point,
+ 'east': east_bound,
+ 'west': west_bound,
+ 'south': south_bound,
+ 'north': north_bound,
+ 'column_step': column_step,
+ 'line_step': line_step,
+ 'column_offset': we_offset,
+ 'line_offset': ns_offset
+ }
+
+ aex = calculate_area_extent(area_dict)
aex_data['area_extent'].append(aex)
aex_data['nlines'].append(nlines)
@@ -568,6 +603,19 @@ def _update_attrs(self, dataset, dataset_info):
dataset.attrs['sensor'] = 'seviri'
dataset.attrs['georef_offset_corrected'] = self.mda[
'offset_corrected']
+ dataset.attrs['time_parameters'] = {
+ 'nominal_start_time': self.nominal_start_time,
+ 'nominal_end_time': self.nominal_end_time,
+ 'observation_start_time': self.observation_start_time,
+ 'observation_end_time': self.observation_end_time,
+ }
+ dataset.attrs['orbital_parameters'] = self._get_orbital_parameters()
+ if self.include_raw_metadata:
+ dataset.attrs['raw_metadata'] = reduce_mda(
+ self.header, max_size=self.mda_max_array_size
+ )
+
+ def _get_orbital_parameters(self):
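+        """Compile the orbital parameters, using the actual satellite position where available."""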
orbital_parameters = {
'projection_longitude': self.mda['projection_parameters'][
'ssp_longitude'],
@@ -587,10 +635,7 @@ def _update_attrs(self, dataset, dataset_info):
})
except NoValidOrbitParams as err:
logger.warning(err)
- dataset.attrs['orbital_parameters'] = orbital_parameters
- dataset.attrs['raw_metadata'] = reduce_mda(
- self.header, max_size=self.mda_max_array_size
- )
+ return orbital_parameters
@cached_property
def satpos(self):
@@ -605,7 +650,7 @@ def satpos(self):
orbit_polynomial = poly_finder.get_orbit_polynomial(self.start_time)
return get_satpos(
orbit_polynomial=orbit_polynomial,
- time=self.start_time,
+ time=self.observation_start_time,
semi_major_axis=self.mda['projection_parameters']['a'],
semi_minor_axis=self.mda['projection_parameters']['b']
)
diff --git a/satpy/readers/seviri_l1b_native_hdr.py b/satpy/readers/seviri_l1b_native_hdr.py
index cfb580754a..8c0212a6f2 100644
--- a/satpy/readers/seviri_l1b_native_hdr.py
+++ b/satpy/readers/seviri_l1b_native_hdr.py
@@ -19,11 +19,8 @@
import numpy as np
-from satpy.readers.eum_base import (time_cds_short, time_cds,
- time_cds_expanded)
-from satpy.readers.seviri_base import (
- VISIR_NUM_LINES, VISIR_NUM_COLUMNS, HRV_NUM_COLUMNS, HRV_NUM_LINES
-)
+from satpy.readers.eum_base import time_cds, time_cds_expanded, time_cds_short
+from satpy.readers.seviri_base import HRV_NUM_COLUMNS, HRV_NUM_LINES, VISIR_NUM_COLUMNS, VISIR_NUM_LINES
class GSDTRecords(object):
@@ -716,7 +713,6 @@ def seviri_l15_trailer(self):
('GeometricQuality', self.geometric_quality),
('TimelinessAndCompleteness', self.timeliness_and_completeness)
]
-
return record
@property
diff --git a/satpy/readers/seviri_l1b_nc.py b/satpy/readers/seviri_l1b_nc.py
index a4d8372676..7dd67c3296 100644
--- a/satpy/readers/seviri_l1b_nc.py
+++ b/satpy/readers/seviri_l1b_nc.py
@@ -21,20 +21,22 @@
import logging
import numpy as np
-import xarray as xr
+from satpy import CHUNK_SIZE
from satpy._compat import cached_property
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy.readers.seviri_base import (SEVIRICalibrationHandler,
- CHANNEL_NAMES, SATNUM,
- get_cds_time, add_scanline_acq_time,
- OrbitPolynomialFinder, get_satpos,
- NoValidOrbitParams)
-from satpy.readers.eum_base import get_service_mode
-
from satpy.readers._geos_area import get_area_definition, get_geos_area_naming
-from satpy import CHUNK_SIZE
-
+from satpy.readers.eum_base import get_service_mode
+from satpy.readers.file_handlers import BaseFileHandler, open_dataset
+from satpy.readers.seviri_base import (
+ CHANNEL_NAMES,
+ SATNUM,
+ NoValidOrbitParams,
+ OrbitPolynomialFinder,
+ SEVIRICalibrationHandler,
+ add_scanline_acq_time,
+ get_cds_time,
+ get_satpos,
+)
logger = logging.getLogger('nc_msg')
@@ -59,10 +61,9 @@ def __init__(self, filename, filename_info, filetype_info,
"""Init the file handler."""
super(NCSEVIRIFileHandler, self).__init__(filename, filename_info, filetype_info)
self.ext_calib_coefs = ext_calib_coefs or {}
- self.nc = None
self.mda = {}
self.reference = datetime.datetime(1958, 1, 1)
- self._read_file()
+ self.get_metadata()
@property
def start_time(self):
@@ -74,29 +75,41 @@ def end_time(self):
"""Get the end time."""
return self.deltaEnd
- def _read_file(self):
+ @cached_property
+ def nc(self):
"""Read the file."""
- if self.nc is None:
-
- self.nc = xr.open_dataset(self.filename,
- decode_cf=True,
- mask_and_scale=False,
- chunks=CHUNK_SIZE)
+ return open_dataset(self.filename, decode_cf=True, mask_and_scale=False,
+ chunks=CHUNK_SIZE).rename({'num_columns_vis_ir': 'x',
+ 'num_rows_vis_ir': 'y'})
+ def get_metadata(self):
+ """Get metadata."""
# Obtain some area definition attributes
- equatorial_radius = (self.nc.attrs['equatorial_radius'] * 1000.)
+ equatorial_radius = self.nc.attrs['equatorial_radius'] * 1000.
polar_radius = (self.nc.attrs['north_polar_radius'] * 1000 + self.nc.attrs['south_polar_radius'] * 1000) * 0.5
ssp_lon = self.nc.attrs['longitude_of_SSP']
+ self.mda['vis_ir_grid_origin'] = self.nc.attrs['vis_ir_grid_origin']
+ self.mda['vis_ir_column_dir_grid_step'] = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0
+ self.mda['vis_ir_line_dir_grid_step'] = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0
+        # If an FSFile is used, the h5netcdf engine is used, which returns arrays instead of floats for attributes
+ if isinstance(equatorial_radius, np.ndarray):
+ equatorial_radius = equatorial_radius.item()
+ polar_radius = polar_radius.item()
+ ssp_lon = ssp_lon.item()
+ self.mda['vis_ir_column_dir_grid_step'] = self.mda['vis_ir_column_dir_grid_step'].item()
+ self.mda['vis_ir_line_dir_grid_step'] = self.mda['vis_ir_line_dir_grid_step'].item()
+
self.mda['projection_parameters'] = {'a': equatorial_radius,
'b': polar_radius,
'h': 35785831.00,
'ssp_longitude': ssp_lon}
- self.mda['number_of_lines'] = int(self.nc.dims['num_rows_vis_ir'])
- self.mda['number_of_columns'] = int(self.nc.dims['num_columns_vis_ir'])
+ self.mda['number_of_lines'] = int(self.nc.dims['y'])
+ self.mda['number_of_columns'] = int(self.nc.dims['x'])
- self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv'])
- self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv'])
+ # only needed for HRV channel which is not implemented yet
+ # self.mda['hrv_number_of_lines'] = int(self.nc.dims['num_rows_hrv'])
+ # self.mda['hrv_number_of_columns'] = int(self.nc.dims['num_columns_hrv'])
self.deltaSt = self.reference + datetime.timedelta(
days=int(self.nc.attrs['true_repeat_cycle_start_day']),
@@ -114,19 +127,6 @@ def _read_file(self):
def get_dataset(self, dataset_id, dataset_info):
"""Get the dataset."""
- channel = dataset_id['name']
-
- if (channel == 'HRV'):
- self.nc = self.nc.rename({'num_columns_hrv': 'x', 'num_rows_hrv': 'y'})
- else:
- # the first channel of a composite will rename the dimension variable
- # but the later channels will raise a value error as its already been renamed
- # we can just ignore these exceptions
- try:
- self.nc = self.nc.rename({'num_columns_vis_ir': 'x', 'num_rows_vis_ir': 'y'})
- except ValueError:
- pass
-
dataset = self.nc[dataset_info['nc_key']]
# Correct for the scan line order
@@ -258,7 +258,7 @@ def get_area_extent(self, dsid):
# following calculations assume grid origin is south-east corner
# section 7.2.4 of MSG Level 1.5 Image Data Format Description
origins = {0: 'NW', 1: 'SW', 2: 'SE', 3: 'NE'}
- grid_origin = self.nc.attrs['vis_ir_grid_origin']
+ grid_origin = self.mda['vis_ir_grid_origin']
grid_origin = int(grid_origin, 16)
if grid_origin != 2:
raise NotImplementedError(
@@ -266,11 +266,11 @@ def get_area_extent(self, dsid):
.format(grid_origin, origins[grid_origin])
)
- center_point = 3712/2
+ center_point = 3712 / 2
- column_step = self.nc.attrs['vis_ir_column_dir_grid_step'] * 1000.0
+ column_step = self.mda['vis_ir_column_dir_grid_step']
- line_step = self.nc.attrs['vis_ir_line_dir_grid_step'] * 1000.0
+ line_step = self.mda['vis_ir_line_dir_grid_step']
# check for Earth model as this affects the north-south and
# west-east offsets
@@ -356,7 +356,13 @@ def _get_earth_model(self):
return int(self.nc.attrs['type_of_earth_model'], 16)
-class NCSEVIRIHRVFileHandler(BaseFileHandler, SEVIRICalibrationHandler):
+class NCSEVIRIHRVFileHandler(NCSEVIRIFileHandler, SEVIRICalibrationHandler):
"""HRV filehandler."""
- pass
+ def get_dataset(self, dataset_id, dataset_info):
+ """Get dataset from file."""
+ return NotImplementedError("Currently the HRV channel is not implemented.")
+
+ def get_area_extent(self, dsid):
+ """Get HRV area extent."""
+        raise NotImplementedError
diff --git a/satpy/readers/seviri_l2_bufr.py b/satpy/readers/seviri_l2_bufr.py
index f02f655eb1..4b9df6460f 100644
--- a/satpy/readers/seviri_l2_bufr.py
+++ b/satpy/readers/seviri_l2_bufr.py
@@ -24,12 +24,18 @@
"""
import logging
-from datetime import timedelta, datetime
+from datetime import datetime, timedelta
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
+
+from satpy import CHUNK_SIZE
+from satpy.readers._geos_area import get_geos_area_naming
+from satpy.readers.eum_base import get_service_mode, recarray2dict
+from satpy.readers.file_handlers import BaseFileHandler
from satpy.readers.seviri_base import mpef_product_header
-from satpy.readers.eum_base import recarray2dict
+from satpy.resample import get_area_def
try:
import eccodes as ec
@@ -37,23 +43,46 @@
raise ImportError(
"Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes")
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy import CHUNK_SIZE
-
logger = logging.getLogger('SeviriL2Bufr')
-data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0000', 'name': '09'},
+data_center_dict = {55: {'ssp': 'E0415', 'name': '08'}, 56: {'ssp': 'E0455', 'name': '09'},
57: {'ssp': 'E0095', 'name': '10'}, 70: {'ssp': 'E0000', 'name': '11'}}
seg_size_dict = {'seviri_l2_bufr_asr': 16, 'seviri_l2_bufr_cla': 16,
'seviri_l2_bufr_csr': 16, 'seviri_l2_bufr_gii': 3,
- 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3}
+ 'seviri_l2_bufr_thu': 16, 'seviri_l2_bufr_toz': 3,
+ 'seviri_l2_bufr_amv': 24}
class SeviriL2BufrFileHandler(BaseFileHandler):
- """File handler for SEVIRI L2 BUFR products."""
+ """File handler for SEVIRI L2 BUFR products.
+
+ **Loading data with AreaDefinition**
+
+    By providing `with_area_definition=True` in the `reader_kwargs`, the dataset is loaded with
+    an AreaDefinition using a standardized area definition from areas.yaml. By default, the dataset
+    will be loaded with a SwathDefinition, i.e. similar to how the data are stored in the BUFR file::
+
+ scene = satpy.Scene(filenames,
+ reader='seviri_l2_bufr',
+ reader_kwargs={'with_area_definition': False})
- def __init__(self, filename, filename_info, filetype_info, **kwargs):
+    **Defining dataset rectification longitude**
+
+    The BUFR data were originally extracted from a rectified two-dimensional grid with a given central longitude
+    (typically the sub-satellite point). This information is available neither in the file itself nor in the filename
+    (for files from the EUMETSAT archive), and it cannot be reliably derived from all datasets themselves. Hence, the
+    rectification longitude can be defined by the user by providing `rectification_longitude` in the `reader_kwargs`::
+
+ scene = satpy.Scene(filenames,
+ reader='seviri_l2_bufr',
+ reader_kwargs={'rectification_longitude': 0.0})
+
+ If not done, default values applicable to the operational grids of the respective SEVIRI instruments will be used.
+ """
+
+ def __init__(self, filename, filename_info, filetype_info, with_area_definition=False,
+ rectification_longitude='default', **kwargs):
"""Initialise the file handler for SEVIRI L2 BUFR data."""
super(SeviriL2BufrFileHandler, self).__init__(filename,
filename_info,
@@ -72,6 +101,14 @@ def __init__(self, filename, filename_info, filetype_info, **kwargs):
self.mpef_header['SpacecraftName'] = data_center_dict[sc_id]['name']
self.mpef_header['RectificationLongitude'] = data_center_dict[sc_id]['ssp']
+ if rectification_longitude != 'default':
+ self.mpef_header['RectificationLongitude'] = f'E{int(rectification_longitude * 10):04d}'
+
+ self.with_adef = with_area_definition
+ if self.with_adef and filetype_info['file_type'] == 'seviri_l2_bufr_amv':
+ logging.warning("AMV BUFR data cannot be loaded with an area definition. Setting self.with_def = False.")
+ self.with_adef = False
+
self.seg_size = seg_size_dict[filetype_info['file_type']]
@property
@@ -96,6 +133,13 @@ def ssp_lon(self):
ssp_lon = self.mpef_header['RectificationLongitude']
return float(ssp_lon[1:])/10.
+ def get_area_def(self, key):
+ """Return the area definition."""
+ try:
+ return self._area_def
+ except AttributeError:
+ raise NotImplementedError
+
def _read_mpef_header(self):
"""Read MPEF header."""
hdr = np.fromfile(self.filename, mpef_product_header, 1)
@@ -148,15 +192,86 @@ def get_array(self, key):
return arr
def get_dataset(self, dataset_id, dataset_info):
- """Get dataset using the BUFR key in dataset_info."""
+ """Create dataset.
+
+ Load data from BUFR file using the BUFR key in dataset_info
+ and create the dataset with or without an AreaDefinition.
+
+ """
arr = self.get_array(dataset_info['key'])
- arr[arr == dataset_info['fill_value']] = np.nan
- xarr = xr.DataArray(arr, dims=["y"])
+ if self.with_adef:
+ xarr = self.get_dataset_with_area_def(arr, dataset_id)
+ # coordinates are not relevant when returning data with an AreaDefinition
+ if 'coordinates' in dataset_info.keys():
+ del dataset_info['coordinates']
+ else:
+ xarr = xr.DataArray(arr, dims=["y"])
+
+ if 'fill_value' in dataset_info:
+ xarr = xarr.where(xarr != dataset_info['fill_value'])
+
+ self._add_attributes(xarr, dataset_info)
+
+ return xarr
+
+ def get_dataset_with_area_def(self, arr, dataset_id):
+ """Get dataset with an AreaDefinition."""
+ if dataset_id['name'] in ['latitude', 'longitude']:
+ self.__setattr__(dataset_id['name'], arr)
+ xarr = xr.DataArray(arr, dims=["y"])
+ else:
+ lons_1d, lats_1d, data_1d = da.compute(self.longitude, self.latitude, arr)
+
+ self._area_def = self._construct_area_def(dataset_id)
+ icol, irow = self._area_def.get_array_indices_from_lonlat(lons_1d, lats_1d)
+
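+            # Scatter the 1D BUFR values onto the target 2D grid; grid cells
+            # without an observation remain NaN.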
+ data_2d = np.empty(self._area_def.shape)
+ data_2d[:] = np.nan
+ data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask]
+
+ xarr = xr.DataArray(da.from_array(data_2d, CHUNK_SIZE), dims=('y', 'x'))
+
+ ntotal = len(icol)
+ nvalid = len(icol.compressed())
+ if nvalid < ntotal:
+ logging.warning(f'{ntotal-nvalid} out of {ntotal} data points could not be put on '
+ f'the grid {self._area_def.area_id}.')
+
+ return xarr
+
+ def _construct_area_def(self, dataset_id):
+ """Construct a standardized AreaDefinition based on satellite, instrument, resolution and sub-satellite point.
+
+ Returns:
+ AreaDefinition: A pyresample AreaDefinition object containing the area definition.
+
+ """
+ res = dataset_id['resolution']
+
+ area_naming_input_dict = {'platform_name': 'msg',
+ 'instrument_name': 'seviri',
+ 'resolution': res,
+ }
+
+ area_naming = get_geos_area_naming({**area_naming_input_dict,
+ **get_service_mode('seviri', self.ssp_lon)})
+
+        # Datasets with a segment size of 3 pixels extend outside the original SEVIRI 3km grid (with 1238 x 1238
+        # segments of 3 pixels each). Hence, we need to use the corresponding area definitions in areas.yaml.
+ if self.seg_size == 3:
+ area_naming['area_id'] += '_ext'
+ area_naming['description'] += ' (extended outside original 3km grid)'
+
+ # Construct AreaDefinition from standardized area definition in areas.yaml.
+ stand_area_def = get_area_def(area_naming['area_id'])
+
+ return stand_area_def
+
+ def _add_attributes(self, xarr, dataset_info):
+ """Add dataset attributes to xarray."""
xarr.attrs['sensor'] = 'SEVIRI'
xarr.attrs['platform_name'] = self.platform_name
xarr.attrs['ssp_lon'] = self.ssp_lon
xarr.attrs['seg_size'] = self.seg_size
xarr.attrs.update(dataset_info)
-
- return xarr
diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py
index 008861e7a2..974e575e08 100644
--- a/satpy/readers/seviri_l2_grib.py
+++ b/satpy/readers/seviri_l2_grib.py
@@ -21,23 +21,22 @@
References:
FM 92 GRIB Edition 2
https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf
-
EUMETSAT Product Navigator
https://navigator.eumetsat.int/
-
"""
+
import logging
+from datetime import timedelta
+
import dask.array as da
import numpy as np
import xarray as xr
-from datetime import timedelta
from satpy import CHUNK_SIZE
-from satpy.readers._geos_area import get_area_definition
+from satpy.readers._geos_area import get_area_definition, get_geos_area_naming
+from satpy.readers.eum_base import get_service_mode
from satpy.readers.file_handlers import BaseFileHandler
-from satpy.readers.seviri_base import (calculate_area_extent,
- PLATFORM_DICT,
- REPEAT_CYCLE_DURATION)
+from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, calculate_area_extent
try:
import eccodes as ec
@@ -45,7 +44,6 @@
raise ImportError(
"Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes")
-
logger = logging.getLogger(__name__)
@@ -57,48 +55,6 @@ def __init__(self, filename, filename_info, filetype_info):
super().__init__(filename, filename_info, filetype_info)
# Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files)
ec.codes_grib_multi_support_on()
- self._read_global_attributes()
-
- def _read_global_attributes(self):
- """Read the global product attributes from the first message.
-
- Read the information about the date and time of the data product,
- the projection and area definition and the number of messages.
-
- """
- with open(self.filename, 'rb') as fh:
- gid = ec.codes_grib_new_from_file(fh)
-
- if gid is None:
- # Could not obtain a valid message id: set attributes to None, number of messages to 0
- logger.warning("Could not obtain a valid message id in GRIB file")
-
- self._ssp_lon = None
- self._nrows = None
- self._ncols = None
- self._pdict, self._area_dict = None, None
-
- return
-
- # Read SSP and date/time
- self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees')
-
- # Read number of points on the x and y axes
- self._nrows = self._get_from_msg(gid, 'Ny')
- self._ncols = self._get_from_msg(gid, 'Nx')
-
- # Creates the projection and area dictionaries
- self._pdict, self._area_dict = self._get_proj_area(gid)
-
- # Determine the number of messages in the product by iterating until an invalid id is obtained
- i = 1
- ec.codes_release(gid)
- while True:
- gid = ec.codes_grib_new_from_file(fh)
- if gid is None:
- break
- ec.codes_release(gid)
- i = i+1
@property
def start_time(self):
@@ -112,10 +68,10 @@ def end_time(self):
def get_area_def(self, dataset_id):
"""Return the area definition for a dataset."""
- # The area extension depends on the resolution of the dataset
- area_dict = self._area_dict.copy()
- area_dict['resolution'] = dataset_id.resolution
- area_extent = calculate_area_extent(area_dict)
+ self._area_dict['column_step'] = dataset_id.resolution
+ self._area_dict['line_step'] = dataset_id.resolution
+
+ area_extent = calculate_area_extent(self._area_dict)
# Call the get_area_definition function to obtain the area
area_def = get_area_definition(self._pdict, area_extent)
@@ -123,54 +79,80 @@ def get_area_def(self, dataset_id):
return area_def
def get_dataset(self, dataset_id, dataset_info):
- """Get dataset using the parameter_number key in dataset_info."""
+ """Get dataset using the parameter_number key in dataset_info.
+
+ In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information
+        (pdict and area_dict) were computed while initializing the file handler. Also, the code would break out of
+        the while-loop below as soon as the correct parameter_number was found. This has now been revised because the
+        reader would sometimes give corrupt information about the number of messages in the file and the dataset
+        dimensions within a given message if the file was only partly read (not looping over all messages) in an
+        earlier instance.
+ """
logger.debug('Reading in file to get dataset with parameter number %d.',
dataset_info['parameter_number'])
xarr = None
-
+ message_found = False
with open(self.filename, 'rb') as fh:
- # Iterate until a message containing the correct parameter number is found
+
+ # Iterate over all messages and fetch data when the correct parameter number is found
while True:
gid = ec.codes_grib_new_from_file(fh)
if gid is None:
- # Could not obtain a valid message ID, break out of the loop
- logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created",
- dataset_info['parameter_number'])
+ if not message_found:
+ # Could not obtain a valid message ID from the grib file
+ logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created",
+ dataset_info['parameter_number'])
break
# Check if the parameter number in the GRIB message corresponds to the required key
parameter_number = self._get_from_msg(gid, 'parameterNumber')
- if parameter_number != dataset_info['parameter_number']:
- # The parameter number is not the correct one, skip to next message
- ec.codes_release(gid)
- continue
+ if parameter_number == dataset_info['parameter_number']:
- # Read the missing value
- missing_value = self._get_from_msg(gid, 'missingValue')
+ self._res = dataset_id.resolution
+ self._read_attributes(gid)
- # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value
- xarr = self._get_xarray_from_msg(gid)
+ # Read the missing value
+ missing_value = self._get_from_msg(gid, 'missingValue')
- xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data)
+ # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value
+ xarr = self._get_xarray_from_msg(gid)
- ec.codes_release(gid)
+ xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data)
+
+ ec.codes_release(gid)
- # Combine all metadata into the dataset attributes and break out of the loop
- xarr.attrs.update(dataset_info)
- xarr.attrs.update(self._get_global_attributes())
- break
+ # Combine all metadata into the dataset attributes and break out of the loop
+ xarr.attrs.update(dataset_info)
+ xarr.attrs.update(self._get_attributes())
+
+ message_found = True
+
+ else:
+ # The parameter number is not the correct one, release gid and skip to next message
+ ec.codes_release(gid)
return xarr
+ def _read_attributes(self, gid):
+ """Read the parameter attributes from the message and create the projection and area dictionaries."""
+ # Read SSP and date/time
+ self._ssp_lon = self._get_from_msg(gid, 'longitudeOfSubSatellitePointInDegrees')
+
+ # Read number of points on the x and y axes
+ self._nrows = self._get_from_msg(gid, 'Ny')
+ self._ncols = self._get_from_msg(gid, 'Nx')
+
+ # Creates the projection and area dictionaries
+ self._pdict, self._area_dict = self._get_proj_area(gid)
+
def _get_proj_area(self, gid):
"""Compute the dictionary with the projection and area definition from a GRIB message.
Args:
gid: The ID of the GRIB message.
-
Returns:
tuple: A tuple of two dictionaries for the projection and the area definition.
pdict:
@@ -189,14 +171,26 @@ def _get_proj_area(self, gid):
             east: coordinate of the east limit
             west: coordinate of the west limit
             south: coordinate of the south limit
-
"""
+ # Get name of area definition
+ area_naming_input_dict = {'platform_name': 'msg',
+ 'instrument_name': 'seviri',
+ 'resolution': self._res,
+ }
+
+ area_naming = get_geos_area_naming({**area_naming_input_dict,
+ **get_service_mode('seviri', self._ssp_lon)})
+
# Read all projection and area parameters from the message
- earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m]
- earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m]
+ earth_major_axis_in_meters = self._get_from_msg(gid, 'earthMajorAxis') * 1000.0 # [m]
+ earth_minor_axis_in_meters = self._get_from_msg(gid, 'earthMinorAxis') * 1000.0 # [m]
+
+ earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters)
+ earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters)
+
nr_in_radius_of_earth = self._get_from_msg(gid, 'NrInRadiusOfEarth')
xp_in_grid_lengths = self._get_from_msg(gid, 'XpInGridLengths')
- h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m]
+ h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m]
# Create the dictionary with the projection data
pdict = {
@@ -206,14 +200,14 @@ def _get_proj_area(self, gid):
'ssp_lon': self._ssp_lon,
'nlines': self._ncols,
'ncols': self._nrows,
- 'a_name': 'geos_seviri',
- 'a_desc': 'Calculated area for SEVIRI L2 GRIB product',
- 'p_id': 'geos',
+ 'a_name': area_naming['area_id'],
+ 'a_desc': area_naming['description'],
+ 'p_id': "",
}
# Compute the dictionary with the area extension
area_dict = {
- 'center_point': xp_in_grid_lengths + 0.5,
+ 'center_point': xp_in_grid_lengths,
'north': self._nrows,
'east': 1,
'west': self._ncols,
@@ -222,15 +216,26 @@ def _get_proj_area(self, gid):
return pdict, area_dict
+ @staticmethod
+ def _scale_earth_axis(data):
+ """Scale Earth axis data to make sure the value matched the expected unit [m].
+
+ The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This
+ method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such
+ that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has
+ been resolved by EUMETSAT this workaround can be removed.
+
+ """
+ scale_factor = 10 ** np.ceil(np.log10(1e6/data))
+ return data * scale_factor
+
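
# A quick worked example of the scaling above (illustrative values only,
# not part of the patch):
import numpy as np

for data in (6.378e14, 6.378e6):
    # mis-scaled by 1e8 -> corrected; already-correct value -> factor of 1
    scale_factor = 10 ** np.ceil(np.log10(1e6 / data))
    print(data * scale_factor)  # -> 6378000.0 in both cases
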
def _get_xarray_from_msg(self, gid):
"""Read the values from the GRIB message and return a DataArray object.
Args:
gid: The ID of the GRIB message.
-
Returns:
DataArray: The array containing the retrieved values.
-
"""
# Data from GRIB message are read into an Xarray...
xarr = xr.DataArray(da.from_array(ec.codes_get_values(
@@ -238,15 +243,14 @@ def _get_xarray_from_msg(self, gid):
return xarr
- def _get_global_attributes(self):
- """Create a dictionary of global attributes to be added to all datasets.
+ def _get_attributes(self):
+ """Create a dictionary of attributes to be added to the dataset.
Returns:
- dict: A dictionary of global attributes.
+ dict: A dictionary of parameter attributes.
ssp_lon: longitude of subsatellite point
sensor: name of sensor
platform_name: name of the platform
-
"""
orbital_parameters = {
'projection_longitude': self._ssp_lon
@@ -259,16 +263,15 @@ def _get_global_attributes(self):
}
return attributes
- def _get_from_msg(self, gid, key):
+ @staticmethod
+ def _get_from_msg(gid, key):
"""Get a value from the GRIB message based on the key, return None if missing.
Args:
gid: The ID of the GRIB message.
key: The key of the required attribute.
-
Returns:
The retrieved attribute or None if the key is missing.
-
"""
try:
attr = ec.codes_get(gid, key)
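
# For reference, a minimal sketch of the message loop used in get_dataset
# above; the file path and parameter number are hypothetical:
import eccodes as ec

with open('seviri_l2.grb', 'rb') as fh:
    while True:
        gid = ec.codes_grib_new_from_file(fh)
        if gid is None:
            break  # no more messages in the file
        if ec.codes_get(gid, 'parameterNumber') == 30:
            values = ec.codes_get_values(gid)  # raw values for this message
        ec.codes_release(gid)
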
diff --git a/satpy/readers/slstr_l1b.py b/satpy/readers/slstr_l1b.py
index 525db2cb59..507d4448a1 100644
--- a/satpy/readers/slstr_l1b.py
+++ b/satpy/readers/slstr_l1b.py
@@ -17,19 +17,18 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""SLSTR L1b reader."""
-import warnings
import logging
import os
import re
-
+import warnings
from datetime import datetime
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
-from satpy.readers.file_handlers import BaseFileHandler
from satpy import CHUNK_SIZE
+from satpy.readers.file_handlers import BaseFileHandler
logger = logging.getLogger(__name__)
@@ -37,23 +36,23 @@
'S3B': 'Sentinel-3B'}
# These are the default channel adjustment factors.
-# Defined in the product notice: S3.PN-SLSTR-L1.06
-# https://www.eumetsat.int/website/wcm/idc/idcplg?IdcService=GET_FILE&dDocName=PDF_S3A_PN_SLSTR_L1_06&RevisionSelectionMethod=LatestReleased&Rendition=Web
-CHANCALIB_FACTORS = {'S1_nadir': 1.0,
- 'S2_nadir': 1.0,
- 'S3_nadir': 1.0,
+# Defined in the product notice: S3.PN-SLSTR-L1.08
+# https://sentinel.esa.int/documents/247904/2731673/Sentinel-3A-and-3B-SLSTR-Product-Notice-Level-1B-SL-1-RBT-at-NRT-and-NTC.pdf
+CHANCALIB_FACTORS = {'S1_nadir': 0.97,
+ 'S2_nadir': 0.98,
+ 'S3_nadir': 0.98,
'S4_nadir': 1.0,
- 'S5_nadir': 1.12,
- 'S6_nadir': 1.2,
+ 'S5_nadir': 1.11,
+ 'S6_nadir': 1.13,
'S7_nadir': 1.0,
'S8_nadir': 1.0,
'S9_nadir': 1.0,
- 'S1_oblique': 1.0,
- 'S2_oblique': 1.0,
- 'S3_oblique': 1.0,
+ 'S1_oblique': 0.94,
+ 'S2_oblique': 0.95,
+ 'S3_oblique': 0.95,
'S4_oblique': 1.0,
- 'S5_oblique': 1.15,
- 'S6_oblique': 1.26,
+ 'S5_oblique': 1.04,
+ 'S6_oblique': 1.07,
'S7_oblique': 1.0,
'S8_oblique': 1.0,
'S9_oblique': 1.0, }
@@ -108,7 +107,7 @@ class NCSLSTR1B(BaseFileHandler):
By default, the calibration factors recommended by EUMETSAT are applied.
This is required as the SLSTR VIS channels are producing slightly incorrect
radiances that require adjustment.
- Satpy uses the radiance corrections in S3.PN-SLSTR-L1.06, checked 26/10/2020.
+ Satpy uses the radiance corrections in S3.PN-SLSTR-L1.08, checked 11/03/2022.
     User-supplied coefficients can be passed via the `user_calibration` kwarg.
     This should be a dict of channel names (such as `S1_nadir`, `S8_oblique`).
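
# Sketch of overriding the default factors via the kwarg described above;
# the file list and factor values are hypothetical:
from satpy import Scene

filenames = ['/path/to/S3A_SL_1_RBT_example.SEN3']
scn = Scene(filenames=filenames, reader='slstr_l1b',
            reader_kwargs={'user_calibration': {'S1_nadir': 1.0,
                                                'S5_oblique': 1.04}})
scn.load(['S1'])
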
@@ -231,6 +230,15 @@ def end_time(self):
class NCSLSTRAngles(BaseFileHandler):
"""Filehandler for angles."""
+ def _loadcart(self, fname):
+ """Load a cartesian file of appropriate type."""
+ cartf = xr.open_dataset(fname,
+ decode_cf=True,
+ mask_and_scale=True,
+ chunks={'columns': CHUNK_SIZE,
+ 'rows': CHUNK_SIZE})
+ return cartf
+
def __init__(self, filename, filename_info, filetype_info):
"""Initialize the angles reader."""
super(NCSLSTRAngles, self).__init__(filename, filename_info,
@@ -245,29 +253,23 @@ def __init__(self, filename, filename_info, filetype_info):
# TODO: get metadata from the manifest file (xfdumanifest.xml)
self.platform_name = PLATFORM_NAMES[filename_info['mission_id']]
self.sensor = 'slstr'
-
self.view = filename_info['view']
self._start_time = filename_info['start_time']
self._end_time = filename_info['end_time']
- cart_file = os.path.join(
+ carta_file = os.path.join(
+ os.path.dirname(self.filename), 'cartesian_a{}.nc'.format(self.view[0]))
+ carti_file = os.path.join(
os.path.dirname(self.filename), 'cartesian_i{}.nc'.format(self.view[0]))
- self.cart = xr.open_dataset(cart_file,
- decode_cf=True,
- mask_and_scale=True,
- chunks={'columns': CHUNK_SIZE,
- 'rows': CHUNK_SIZE})
cartx_file = os.path.join(
os.path.dirname(self.filename), 'cartesian_tx.nc')
- self.cartx = xr.open_dataset(cartx_file,
- decode_cf=True,
- mask_and_scale=True,
- chunks={'columns': CHUNK_SIZE,
- 'rows': CHUNK_SIZE})
+ self.carta = self._loadcart(carta_file)
+ self.carti = self._loadcart(carti_file)
+ self.cartx = self._loadcart(cartx_file)
def get_dataset(self, key, info):
"""Load a dataset."""
- if not info['view'].name.startswith(self.view):
+ if not key['view'].name.startswith(self.view[0]):
return
logger.debug('Reading %s.', key['name'])
# Check if file_key is specified in the yaml
@@ -283,15 +285,19 @@ def get_dataset(self, key, info):
if c_step != 1 or l_step != 1:
logger.debug('Interpolating %s.', key['name'])
-
# TODO: do it in cartesian coordinates ! pbs at date line and
# possible
tie_x = self.cartx['x_tx'].data[0, :][::-1]
tie_y = self.cartx['y_tx'].data[:, 0]
- full_x = self.cart['x_i' + self.view[0]].data
- full_y = self.cart['y_i' + self.view[0]].data
+ if key.get('resolution', 1000) == 500:
+ full_x = self.carta['x_a' + self.view[0]].data
+ full_y = self.carta['y_a' + self.view[0]].data
+ else:
+ full_x = self.carti['x_i' + self.view[0]].data
+ full_y = self.carti['y_i' + self.view[0]].data
variable = variable.fillna(0)
+ variable.attrs['resolution'] = key.get('resolution', 1000)
from scipy.interpolate import RectBivariateSpline
spl = RectBivariateSpline(
diff --git a/satpy/readers/slstr_l2.py b/satpy/readers/slstr_l2.py
deleted file mode 100644
index 7531834be8..0000000000
--- a/satpy/readers/slstr_l2.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright (c) 2017 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Reader for Sentinel-3 SLSTR SST data."""
-
-from datetime import datetime
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy import CHUNK_SIZE
-import xarray as xr
-
-
-class SLSTRL2FileHandler(BaseFileHandler):
- """File handler for Sentinel-3 SSL L2 netCDF files."""
-
- def __init__(self, filename, filename_info, filetype_info, engine=None):
- """Initialize the file handler for Sentinel-3 SSL L2 netCDF data."""
- super(SLSTRL2FileHandler, self).__init__(filename, filename_info, filetype_info)
-
- if filename.endswith('tar'):
- import tarfile
- import os
- import tempfile
- with tempfile.TemporaryDirectory() as tempdir:
- with tarfile.open(name=filename, mode='r') as tf:
- sst_filename = next((name for name in tf.getnames()
- if name.endswith('nc') and 'GHRSST-SSTskin' in name))
- tf.extract(sst_filename, tempdir)
- fullpath = os.path.join(tempdir, sst_filename)
- self.nc = xr.open_dataset(fullpath,
- decode_cf=True,
- mask_and_scale=True,
- engine=engine,
- chunks={'ni': CHUNK_SIZE,
- 'nj': CHUNK_SIZE})
- else:
- self.nc = xr.open_dataset(filename,
- decode_cf=True,
- mask_and_scale=True,
- engine=engine,
- chunks={'ni': CHUNK_SIZE,
- 'nj': CHUNK_SIZE})
-
- self.nc = self.nc.rename({'ni': 'x', 'nj': 'y'})
- self.filename_info['start_time'] = datetime.strptime(
- self.nc.start_time, '%Y%m%dT%H%M%SZ')
- self.filename_info['end_time'] = datetime.strptime(
- self.nc.stop_time, '%Y%m%dT%H%M%SZ')
-
- def get_dataset(self, key, info):
- """Get any available dataset."""
- stdname = info.get('standard_name')
- return self.nc[stdname].squeeze()
-
- @property
- def start_time(self):
- """Get start time."""
- return self.filename_info['start_time']
-
- @property
- def end_time(self):
- """Get end time."""
- return self.filename_info['end_time']
diff --git a/satpy/readers/smos_l2_wind.py b/satpy/readers/smos_l2_wind.py
index 9e1cc8ea99..116ac39756 100644
--- a/satpy/readers/smos_l2_wind.py
+++ b/satpy/readers/smos_l2_wind.py
@@ -25,9 +25,11 @@
"""
import logging
-import numpy as np
from datetime import datetime
+
+import numpy as np
from pyresample.geometry import AreaDefinition
+
from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4
logger = logging.getLogger(__name__)
diff --git a/satpy/readers/tropomi_l2.py b/satpy/readers/tropomi_l2.py
index e632c18af3..9f0b46500a 100644
--- a/satpy/readers/tropomi_l2.py
+++ b/satpy/readers/tropomi_l2.py
@@ -29,12 +29,14 @@
"""
-from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4
import logging
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
+
from satpy import CHUNK_SIZE
+from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4
logger = logging.getLogger(__name__)
@@ -113,21 +115,25 @@ def available_datasets(self, configured_datasets=None):
# then we should keep it going down the chain
yield is_avail, ds_info
- # This is where we dynamically add new datasets
- # We will sift through all groups and variables, looking for data matching
- # the geolocation bounds
+ yield from self._iterate_over_dataset_contents(handled_variables, lat_shape)
- # Iterate over dataset contents
+ def _iterate_over_dataset_contents(self, handled_variables, shape):
+ """Iterate over dataset contents.
+
+        This is where we dynamically add new datasets. We sift through all
+        groups and variables, looking for data matching the geolocation
+        bounds.
+ """
for var_name, val in self.file_content.items():
# Only evaluate variables
if isinstance(val, netCDF4.Variable):
logger.debug("Evaluating new variable: %s", var_name)
var_shape = self[var_name + "/shape"]
logger.debug("Dims:{}".format(var_shape))
- if (lat_shape == var_shape[:len(lat_shape)]):
+ if shape == var_shape[:len(shape)]:
logger.debug("Found valid additional dataset: %s", var_name)
# Skip anything we have already configured
- if (var_name in handled_variables):
+ if var_name in handled_variables:
logger.debug("Already handled, skipping: %s", var_name)
continue
handled_variables.add(var_name)
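
# Sketch of the shape test above: a variable qualifies when its leading
# dimensions match the geolocation shape (shapes are hypothetical):
lat_shape = (3245, 450)
var_shape = (3245, 450, 4)
print(lat_shape == var_shape[:len(lat_shape)])  # -> True
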
diff --git a/satpy/readers/utils.py b/satpy/readers/utils.py
index 9218656ee5..289d73b382 100644
--- a/satpy/readers/utils.py
+++ b/satpy/readers/utils.py
@@ -23,21 +23,17 @@
import shutil
import tempfile
import warnings
-from contextlib import closing
+from contextlib import closing, contextmanager
from io import BytesIO
-from subprocess import Popen, PIPE
+from shutil import which
+from subprocess import PIPE, Popen # nosec
import numpy as np
import pyproj
import xarray as xr
from pyresample.geometry import AreaDefinition
-from satpy import CHUNK_SIZE
-try:
- from shutil import which
-except ImportError:
- # python 2 - won't be used, but needed for mocking in tests
- which = None
+from satpy import CHUNK_SIZE
LOGGER = logging.getLogger(__name__)
@@ -108,12 +104,13 @@ def get_geostationary_angle_extent(geos_area):
return xmax, ymax
-def get_geostationary_mask(area):
+def get_geostationary_mask(area, chunks=None):
"""Compute a mask of the earth's shape as seen by a geostationary satellite.
Args:
area (pyresample.geometry.AreaDefinition) : Corresponding area
definition
+ chunks (int or tuple): Chunk size for the 2D array that is generated.
Returns:
Boolean mask, True inside the earth's shape, False outside.
@@ -126,7 +123,7 @@ def get_geostationary_mask(area):
ymax *= h
# Compute projection coordinates at the centre of each pixel
- x, y = area.get_proj_coords(chunks=CHUNK_SIZE)
+ x, y = area.get_proj_coords(chunks=chunks or CHUNK_SIZE)
# Compute mask of the earth's elliptical shape
return ((x / xmax) ** 2 + (y / ymax) ** 2) <= 1
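
# Plain-numpy sketch of the ellipse test above: pixels whose projection
# coordinates fall inside the scaled ellipse are "on Earth" (extents and
# grid are hypothetical):
import numpy as np

xmax, ymax = 5.43e6, 5.41e6  # metres, after multiplying by satellite height
xx, yy = np.meshgrid(np.linspace(-6e6, 6e6, 5), np.linspace(-6e6, 6e6, 5))
print((xx / xmax) ** 2 + (yy / ymax) ** 2 <= 1)
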
@@ -201,10 +198,20 @@ def get_sub_area(area, xslice, yslice):
new_area_extent)
-def unzip_file(filename):
- """Unzip the file if file is bzipped = ending with 'bz2'."""
- if filename.endswith('bz2'):
- fdn, tmpfilepath = tempfile.mkstemp()
+def unzip_file(filename, prefix=None):
+ """Unzip the file ending with 'bz2'. Initially with pbzip2 if installed or bz2.
+
+ Args:
+ filename: The file to unzip.
+        prefix (str, optional): If the file is one of many segments of data, prefix for the random
+            filename so that the segments sort correctly. This is normally the segment number.
+
+ Returns:
+ Temporary filename path for decompressed file or None.
+
+ """
+ if os.fspath(filename).endswith('bz2'):
+ fdn, tmpfilepath = tempfile.mkstemp(prefix=prefix)
LOGGER.info("Using temp file for BZ2 decompression: %s", tmpfilepath)
# try pbzip2
pbzip = which('pbzip2')
@@ -220,7 +227,7 @@ def unzip_file(filename):
runner = [pbzip,
'-dc',
filename]
- p = Popen(runner, stdout=PIPE, stderr=PIPE)
+ p = Popen(runner, stdout=PIPE, stderr=PIPE) # nosec
stdout = BytesIO(p.communicate()[0])
status = p.returncode
if status != 0:
@@ -255,6 +262,43 @@ def unzip_file(filename):
return None
+@contextmanager
+def unzip_context(filename):
+ """Context manager for decompressing a .bz2 file on the fly.
+
+ Uses `unzip_file`. Removes the uncompressed file on exit of the context manager.
+
+ Returns: the filename of the uncompressed file or of the original file if it was not
+ compressed.
+
+ """
+ unzipped = unzip_file(filename)
+ if unzipped is not None:
+ yield unzipped
+ os.remove(unzipped)
+ else:
+ yield filename
+
+
+@contextmanager
+def generic_open(filename, *args, **kwargs):
+ """Context manager for opening either a regular file or a bzip2 file.
+
+ Returns a file-like object.
+ """
+ if os.fspath(filename).endswith('.bz2'):
+ fp = bz2.open(filename, *args, **kwargs)
+ else:
+ try:
+ fp = filename.open(*args, **kwargs)
+ except AttributeError:
+ fp = open(filename, *args, **kwargs)
+
+ yield fp
+
+ fp.close()
+
+
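
# Usage sketch for the two helpers added above ('myfile.dat.bz2' is a
# hypothetical path): generic_open yields a file-like object, while
# unzip_context yields a path and removes any temporary file on exit.
from satpy.readers.utils import generic_open, unzip_context

with generic_open('myfile.dat.bz2', mode='rb') as fp:
    header = fp.read(16)

with unzip_context('myfile.dat.bz2') as uncompressed_path:
    print(uncompressed_path)
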
def bbox(img):
"""Find the bounding box around nonzero elements in the given array.
diff --git a/satpy/readers/vaisala_gld360.py b/satpy/readers/vaisala_gld360.py
index 145c655c33..3e3e8dac9b 100644
--- a/satpy/readers/vaisala_gld360.py
+++ b/satpy/readers/vaisala_gld360.py
@@ -29,8 +29,9 @@
"""
import logging
-import pandas as pd
+
import dask.array as da
+import pandas as pd
import xarray as xr
from satpy import CHUNK_SIZE
diff --git a/satpy/readers/vii_base_nc.py b/satpy/readers/vii_base_nc.py
index 49d3825da8..e1f9f74e76 100644
--- a/satpy/readers/vii_base_nc.py
+++ b/satpy/readers/vii_base_nc.py
@@ -18,13 +18,14 @@
"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) readers base class."""
-import logging
+import logging
from datetime import datetime
+from geotiepoints.viiinterpolator import tie_points_geo_interpolation, tie_points_interpolation
+
from satpy.readers.netcdf_utils import NetCDF4FileHandler
from satpy.readers.vii_utils import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR
-from geotiepoints.viiinterpolator import tie_points_interpolation, tie_points_geo_interpolation
logger = logging.getLogger(__name__)
@@ -101,11 +102,6 @@ def get_dataset(self, dataset_id, dataset_info):
if orthorect_data_name is not None:
variable = self._perform_orthorectification(variable, orthorect_data_name)
- # If the dataset contains a longitude, change it to the interval [0., 360.) as natively in the product
- # since the unwrapping performed during the interpolation might have created values outside this range
- if dataset_info.get('standard_name', None) == 'longitude':
- variable %= 360.
-
# Manage the attributes of the dataset
variable.attrs.setdefault('units', None)
diff --git a/satpy/readers/vii_l1b_nc.py b/satpy/readers/vii_l1b_nc.py
index 492b8730cf..2e66c3deb0 100644
--- a/satpy/readers/vii_l1b_nc.py
+++ b/satpy/readers/vii_l1b_nc.py
@@ -18,14 +18,16 @@
"""EUMETSAT EPS-SG Visible/Infrared Imager (VII) Level 1B products reader.
The ``vii_l1b_nc`` reader reads and calibrates EPS-SG VII L1b image data in netCDF format. The format is explained
-in the `EPS-SG VII Level 1B Product Format Specification`_.
+in the `EPS-SG VII Level 1B Product Format Specification V4A`_.
-.. _EPS-SG VII Level 1B Product Format Specification: https://www.eumetsat.int/website/wcm/idc/idcplg?
- IdcService=GET_FILE&dDocName=PDF_EPSSG_VII_L1B_PFS&RevisionSelectionMethod=LatestReleased&Rendition=Web
+This version is applicable to the VII test data V2, to be released in January 2022.
+
+.. _EPS-SG VII Level 1B Product Format Specification V4A: https://www.eumetsat.int/media/44393
"""
import logging
+
import numpy as np
from satpy.readers.vii_base_nc import ViiNCBaseFileHandler
@@ -69,19 +71,18 @@ def _perform_calibration(self, variable, dataset_info):
if calibration_name == 'brightness_temperature':
# Extract the values of calibration coefficients for the current channel
chan_index = dataset_info['chan_thermal_index']
- cw = self._channel_cw_thermal[chan_index] * 1e-3
+ cw = self._channel_cw_thermal[chan_index]
a = self._bt_conversion_a[chan_index]
b = self._bt_conversion_b[chan_index]
# Perform the calibration
calibrated_variable = self._calibrate_bt(variable, cw, a, b)
calibrated_variable.attrs = variable.attrs
elif calibration_name == 'reflectance':
- scale = 1/(dataset_info['wavelength'][2] - dataset_info['wavelength'][0])
# Extract the values of calibration coefficients for the current channel
chan_index = dataset_info['chan_solar_index']
- isi = scale * self._integrated_solar_irradiance[chan_index]
+ isi = self._integrated_solar_irradiance[chan_index]
# Perform the calibration
- calibrated_variable = self._calibrate_refl(variable, self.angle_factor, isi)
+ calibrated_variable = self._calibrate_refl(variable, self.angle_factor.data, isi)
calibrated_variable.attrs = variable.attrs
elif calibration_name == 'radiance':
calibrated_variable = variable
@@ -141,5 +142,5 @@ def _calibrate_refl(radiance, angle_factor, isi):
numpy ndarray: array containing the calibrated reflectance values.
"""
- refl_values = (np.pi / isi) * angle_factor * radiance
+ refl_values = (np.pi / isi) * angle_factor * radiance * 100.0
return refl_values
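
# Worked example of the reflectance formula above with hypothetical numbers,
# assuming the angle factor is 1/cos(solar zenith angle):
import numpy as np

isi = 500.0                                    # integrated solar irradiance
radiance = 50.0
angle_factor = 1.0 / np.cos(np.deg2rad(30.0))  # sza = 30 degrees
print((np.pi / isi) * angle_factor * radiance * 100.0)  # ~36.3 (percent)
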
diff --git a/satpy/readers/viirs_compact.py b/satpy/readers/viirs_compact.py
index 43c99090d8..7988a07d53 100644
--- a/satpy/readers/viirs_compact.py
+++ b/satpy/readers/viirs_compact.py
@@ -25,7 +25,7 @@
For more information on this format, the reader can refer to the
`Compact VIIRS SDR Product Format User Guide` that can be found on this EARS_ page.
-.. _EARS: https://www.eumetsat.int/website/home/Data/RegionalDataServiceEARS/EARSVIIRS/index.html
+.. _EARS: https://www.eumetsat.int/media/45988
"""
@@ -37,6 +37,7 @@
import h5py
import numpy as np
import xarray as xr
+
from satpy import CHUNK_SIZE
from satpy.readers.file_handlers import BaseFileHandler
from satpy.readers.utils import np2str
diff --git a/satpy/readers/viirs_edr_active_fires.py b/satpy/readers/viirs_edr_active_fires.py
index 5ef58d681c..f1bcf4d3cc 100644
--- a/satpy/readers/viirs_edr_active_fires.py
+++ b/satpy/readers/viirs_edr_active_fires.py
@@ -21,11 +21,12 @@
ASCII files.
"""
-from satpy.readers.netcdf_utils import NetCDF4FileHandler
-from satpy.readers.file_handlers import BaseFileHandler
import dask.dataframe as dd
import xarray as xr
+from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.netcdf_utils import NetCDF4FileHandler
+
# map platform attributes to Oscar standard name
PLATFORM_MAP = {
"NPP": "Suomi-NPP",
@@ -74,7 +75,7 @@ def get_dataset(self, dsid, dsinfo):
data.attrs['units'] = 'K'
data.attrs["platform_name"] = PLATFORM_MAP.get(self.filename_info['satellite_name'].upper(), "unknown")
- data.attrs["sensor"] = "VIIRS"
+ data.attrs["sensor"] = self.sensor_name
return data
@@ -91,12 +92,12 @@ def end_time(self):
@property
def sensor_name(self):
"""Name of sensor for this file."""
- return self["sensor"]
+ return self["/attr/instrument_name"].lower()
@property
def platform_name(self):
"""Name of platform/satellite for this file."""
- return self["platform_name"]
+ return self["/attr/satellite_name"]
class VIIRSActiveFiresTextFileHandler(BaseFileHandler):
diff --git a/satpy/readers/viirs_edr_flood.py b/satpy/readers/viirs_edr_flood.py
index baf655e743..2d9c319656 100644
--- a/satpy/readers/viirs_edr_flood.py
+++ b/satpy/readers/viirs_edr_flood.py
@@ -17,9 +17,10 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Interface to VIIRS flood product."""
-from satpy.readers.hdf4_utils import HDF4FileHandler
-from pyresample import geometry
import numpy as np
+from pyresample import geometry
+
+from satpy.readers.hdf4_utils import HDF4FileHandler
class VIIRSEDRFlood(HDF4FileHandler):
diff --git a/satpy/readers/viirs_l1b.py b/satpy/readers/viirs_l1b.py
index 0d17f38b65..a265bb1f82 100644
--- a/satpy/readers/viirs_l1b.py
+++ b/satpy/readers/viirs_l1b.py
@@ -19,7 +19,9 @@
import logging
from datetime import datetime
+
import numpy as np
+
from satpy.readers.netcdf_utils import NetCDF4FileHandler
LOG = logging.getLogger(__name__)
@@ -68,11 +70,7 @@ def platform_name(self):
@property
def sensor_name(self):
"""Get sensor name."""
- res = self['/attr/instrument']
- if isinstance(res, np.ndarray):
- return str(res.astype(str))
- else:
- return res
+ return self['/attr/instrument'].lower()
def adjust_scaling_factors(self, factors, file_units, output_units):
"""Adjust scaling factors."""
@@ -98,7 +96,7 @@ def adjust_scaling_factors(self, factors, file_units, output_units):
def get_shape(self, ds_id, ds_info):
"""Get shape."""
- var_path = ds_info.get('file_key', 'observation_data/{}'.format(ds_id['name']))
+ var_path = self._dataset_name_to_var_path(ds_id['name'], ds_info)
return self.get(var_path + '/shape', 1)
@property
@@ -172,12 +170,12 @@ def _get_dataset_valid_range(self, dataset_id, ds_info, var_path):
def get_metadata(self, dataset_id, ds_info):
"""Get metadata."""
- var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name']))
+ var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info)
shape = self.get_shape(dataset_id, ds_info)
file_units = self._get_dataset_file_units(dataset_id, ds_info, var_path)
# Get extra metadata
- if '/dimension/number_of_scans' in self:
+ if self._is_scan_based_array(shape):
rows_per_scan = int(shape[0] / self['/dimension/number_of_scans'])
ds_info.setdefault('rows_per_scan', rows_per_scan)
@@ -196,9 +194,12 @@ def get_metadata(self, dataset_id, ds_info):
i.update(dataset_id.to_dict())
return i
+ def _is_scan_based_array(self, shape):
+ return '/dimension/number_of_scans' in self and isinstance(shape, tuple) and shape
+
def get_dataset(self, dataset_id, ds_info):
"""Get dataset."""
- var_path = ds_info.get('file_key', 'observation_data/{}'.format(dataset_id['name']))
+ var_path = self._dataset_name_to_var_path(dataset_id['name'], ds_info)
metadata = self.get_metadata(dataset_id, ds_info)
valid_min, valid_max, scale_factor, scale_offset = self._get_dataset_valid_range(dataset_id, ds_info, var_path)
@@ -238,3 +239,27 @@ def get_dataset(self, dataset_id, ds_info):
if 'number_of_lines' in data.dims:
data = data.rename({'number_of_lines': 'y', 'number_of_pixels': 'x'})
return data
+
+ def available_datasets(self, configured_datasets=None):
+ """Generate dataset info and their availablity.
+
+ See
+ :meth:`satpy.readers.file_handlers.BaseFileHandler.available_datasets`
+ for details.
+
+ """
+ for is_avail, ds_info in (configured_datasets or []):
+ if is_avail is not None:
+ # some other file handler said it has this dataset
+ # we don't know any more information than the previous
+ # file handler so let's yield early
+ yield is_avail, ds_info
+ continue
+ ft_matches = self.file_type_matches(ds_info['file_type'])
+ var_path = self._dataset_name_to_var_path(ds_info['name'], ds_info)
+ is_in_file = var_path in self
+ yield ft_matches and is_in_file, ds_info
+
+ @staticmethod
+ def _dataset_name_to_var_path(dataset_name: str, ds_info: dict) -> str:
+ return ds_info.get('file_key', 'observation_data/{}'.format(dataset_name))
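
# Sketch of the file_key fallback above (inputs are hypothetical): an
# explicit file_key wins, otherwise the path is derived from the name.
from satpy.readers.viirs_l1b import VIIRSL1BFileHandler

print(VIIRSL1BFileHandler._dataset_name_to_var_path('I01', {}))
# -> observation_data/I01
print(VIIRSL1BFileHandler._dataset_name_to_var_path(
    'latitude', {'file_key': 'geolocation_data/latitude'}))
# -> geolocation_data/latitude
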
diff --git a/satpy/readers/viirs_sdr.py b/satpy/readers/viirs_sdr.py
index f932994eca..a8fd98954e 100644
--- a/satpy/readers/viirs_sdr.py
+++ b/satpy/readers/viirs_sdr.py
@@ -29,12 +29,13 @@
"""
import logging
+import os.path
+from contextlib import suppress
from datetime import datetime, timedelta
from glob import glob
-import os.path
-import numpy as np
import dask.array as da
+import numpy as np
import xarray as xr
from satpy.readers.hdf5_utils import HDF5FileHandler
@@ -219,18 +220,33 @@ def get_file_units(self, dataset_id, ds_info):
LOG.debug("Unknown units for file key '%s'", dataset_id)
return file_units
- def scale_swath_data(self, data, scaling_factors):
+ def scale_swath_data(self, data, scaling_factors, dataset_group):
"""Scale swath data using scaling factors and offsets.
Multi-granule (a.k.a. aggregated) files will have more than the usual two values.
"""
- num_grans = len(scaling_factors) // 2
- gran_size = data.shape[0] // num_grans
- factors = scaling_factors.where(scaling_factors > -999, np.float32(np.nan))
- factors = factors.data.reshape((-1, 2))
- factors = xr.DataArray(da.repeat(factors, gran_size, axis=0),
- dims=(data.dims[0], 'factors'))
- data = data * factors[:, 0] + factors[:, 1]
+ rows_per_gran = self._get_rows_per_granule(dataset_group)
+ factors = self._mask_and_reshape_factors(scaling_factors)
+ data = self._map_and_apply_factors(data, factors, rows_per_gran)
+ return data
+
+ @staticmethod
+ def _mask_and_reshape_factors(factors):
+ factors = factors.where(factors > -999, np.float32(np.nan))
+ return factors.data.reshape((-1, 2)).rechunk((1, 2)) # make it so map_blocks happens per factor
+
+ @staticmethod
+ def _map_and_apply_factors(data, factors, rows_per_gran):
+ # The user may have requested a different chunking scheme, but we need
+ # per granule chunking right now so factor chunks map 1:1 to data chunks
+ old_chunks = data.chunks
+ dask_data = data.data.rechunk((tuple(rows_per_gran), data.data.chunks[1]))
+ dask_data = da.map_blocks(_apply_factors, dask_data, factors,
+ chunks=dask_data.chunks, dtype=data.dtype,
+ meta=np.array([[]], dtype=data.dtype))
+ data = xr.DataArray(dask_data.rechunk(old_chunks),
+ dims=data.dims, coords=data.coords,
+ attrs=data.attrs)
return data
@staticmethod
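
# Self-contained dask sketch of the per-granule factor application above:
# factors are rechunked to one (slope, offset) row per block so map_blocks
# pairs each data block with exactly one factor pair (values hypothetical):
import dask.array as da
import numpy as np

factors = da.from_array(np.array([[2.0, 1.0], [0.5, 0.0]]), chunks=(1, 2))
data = da.ones((8, 4), chunks=(4, 4))  # two granules of 4 rows each

def _apply(block, factor_set):
    return block * factor_set[0, 0] + factor_set[0, 1]

scaled = da.map_blocks(_apply, data, factors, chunks=data.chunks, dtype=data.dtype)
print(scaled.compute())  # rows 0-3 -> 3.0, rows 4-7 -> 0.5
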
@@ -282,7 +298,7 @@ def _generate_file_key(self, ds_id, ds_info, factors=False):
if ds_id['name'] in ['dnb_longitude', 'dnb_latitude']:
if self.use_tc is True:
return var_path + '_TC'
- elif self.use_tc is None and var_path + '_TC' in self.file_content:
+ if self.use_tc is None and var_path + '_TC' in self.file_content:
return var_path + '_TC'
return var_path
@@ -308,13 +324,7 @@ def _scan_size(self, dataset_group_name):
def concatenate_dataset(self, dataset_group, var_path):
"""Concatenate dataset."""
scan_size = self._scan_size(dataset_group)
- number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules'
- nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group])
- scans = []
- for granule in range(self[nb_granules_path]):
- scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans'
- scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule)
- scans.append(self[scans_path])
+ scans = self._get_scans_per_granule(dataset_group)
start_scan = 0
data_chunks = []
scans = xr.DataArray(scans)
@@ -328,6 +338,21 @@ def concatenate_dataset(self, dataset_group, var_path):
else:
return self.expand_single_values(variable, scans)
+ def _get_rows_per_granule(self, dataset_group):
+ scan_size = self._scan_size(dataset_group)
+ scans_per_gran = self._get_scans_per_granule(dataset_group)
+ return [scan_size * gran_scans for gran_scans in scans_per_gran]
+
+ def _get_scans_per_granule(self, dataset_group):
+ number_of_granules_path = 'Data_Products/{dataset_group}/{dataset_group}_Aggr/attr/AggregateNumberGranules'
+ nb_granules_path = number_of_granules_path.format(dataset_group=DATASET_KEYS[dataset_group])
+ scans = []
+ for granule in range(self[nb_granules_path]):
+ scans_path = 'Data_Products/{dataset_group}/{dataset_group}_Gran_{granule}/attr/N_Number_Of_Scans'
+ scans_path = scans_path.format(dataset_group=DATASET_KEYS[dataset_group], granule=granule)
+ scans.append(self[scans_path])
+ return scans
+
def mask_fill_values(self, data, ds_info):
"""Mask fill values."""
is_floating = np.issubdtype(data.dtype, np.floating)
@@ -353,9 +378,8 @@ def get_dataset(self, dataset_id, ds_info):
dataset_group = [ds_group for ds_group in ds_info['dataset_groups'] if ds_group in self.datasets]
if not dataset_group:
return
- else:
- dataset_group = dataset_group[0]
- ds_info['dataset_group'] = dataset_group
+ dataset_group = dataset_group[0]
+ ds_info['dataset_group'] = dataset_group
var_path = self._generate_file_key(dataset_id, ds_info)
factor_var_path = ds_info.get("factors_key", var_path + "Factors")
@@ -365,7 +389,7 @@ def get_dataset(self, dataset_id, ds_info):
output_units = ds_info.get("units", file_units)
factors = self._get_scaling_factors(file_units, output_units, factor_var_path)
if factors is not None:
- data = self.scale_swath_data(data, factors)
+ data = self.scale_swath_data(data, factors, dataset_group)
else:
LOG.debug("No scaling factors found for %s", dataset_id)
@@ -436,18 +460,22 @@ def available_datasets(self, configured_datasets=None):
yield is_avail, ds_info
-def split_desired_other(fhs, req_geo, rem_geo):
+def split_desired_other(fhs, prime_geo, second_geo):
"""Split the provided filehandlers *fhs* into desired filehandlers and others."""
desired = []
other = []
for fh in fhs:
- if req_geo in fh.datasets:
+ if prime_geo in fh.datasets:
desired.append(fh)
- elif rem_geo in fh.datasets:
+ elif second_geo in fh.datasets:
other.append(fh)
return desired, other
+def _apply_factors(data, factor_set):
+ return data * factor_set[0, 0] + factor_set[0, 1]
+
+
class VIIRSSDRReader(FileYAMLReader):
"""Custom file reader for finding VIIRS SDR geolocation at runtime."""
@@ -474,7 +502,7 @@ def filter_filenames_by_info(self, filename_items):
geo_keep = []
geo_del = []
for filename, filename_info in filename_items:
- filename_info['datasets'] = datasets = filename_info['datasets'].split('-')
+ datasets = filename_info['datasets'].split('-')
if ('GITCO' in datasets) or ('GMTCO' in datasets):
if self.use_tc is False:
geo_del.append(filename)
@@ -486,25 +514,28 @@ def filter_filenames_by_info(self, filename_items):
else:
geo_keep.append(filename)
if geo_keep:
- fdict = dict(filename_items)
- for to_del in geo_del:
- for dataset in ['GITCO', 'GMTCO', 'GIMGO', 'GMODO']:
- try:
- fdict[to_del]['datasets'].remove(dataset)
- except ValueError:
- pass
- if not fdict[to_del]['datasets']:
- del fdict[to_del]
- filename_items = fdict.items()
- for _filename, filename_info in filename_items:
- filename_info['datasets'] = '-'.join(filename_info['datasets'])
+ filename_items = self._remove_geo_datasets_from_files(filename_items, geo_del)
return super(VIIRSSDRReader, self).filter_filenames_by_info(filename_items)
- def _load_from_geo_ref(self, dsid):
+ def _remove_geo_datasets_from_files(self, filename_items, files_to_edit):
+ fdict = dict(filename_items)
+ for to_del in files_to_edit:
+ fdict[to_del]['datasets'] = fdict[to_del]['datasets'].split('-')
+ for dataset in ['GITCO', 'GMTCO', 'GIMGO', 'GMODO']:
+ with suppress(ValueError):
+ fdict[to_del]['datasets'].remove(dataset)
+ if not fdict[to_del]['datasets']:
+ del fdict[to_del]
+ else:
+ fdict[to_del]['datasets'] = "-".join(fdict[to_del]['datasets'])
+ filename_items = fdict.items()
+ return filename_items
+
+ def _load_filenames_from_geo_ref(self, dsid):
"""Load filenames from the N_GEO_Ref attribute of a dataset's file."""
file_handlers = self._get_file_handlers(dsid)
if not file_handlers:
- return None
+ return []
fns = []
for fh in file_handlers:
@@ -529,34 +560,34 @@ def _load_from_geo_ref(self, dsid):
return fns
- def _get_req_rem_geo(self, ds_info):
+ def _get_primary_secondary_geo_groups(self, ds_info):
"""Find out which geolocation files are needed."""
if ds_info['dataset_groups'][0].startswith('GM'):
if self.use_tc is False:
- req_geo = 'GMODO'
- rem_geo = 'GMTCO'
+ prime_geo = 'GMODO'
+ second_geo = 'GMTCO'
else:
- req_geo = 'GMTCO'
- rem_geo = 'GMODO'
+ prime_geo = 'GMTCO'
+ second_geo = 'GMODO'
elif ds_info['dataset_groups'][0].startswith('GI'):
if self.use_tc is False:
- req_geo = 'GIMGO'
- rem_geo = 'GITCO'
+ prime_geo = 'GIMGO'
+ second_geo = 'GITCO'
else:
- req_geo = 'GITCO'
- rem_geo = 'GIMGO'
+ prime_geo = 'GITCO'
+ second_geo = 'GIMGO'
else:
raise ValueError('Unknown dataset group %s' % ds_info['dataset_groups'][0])
- return req_geo, rem_geo
+ return prime_geo, second_geo
def get_right_geo_fhs(self, dsid, fhs):
"""Find the right geographical file handlers for given dataset ID *dsid*."""
ds_info = self.all_ids[dsid]
- req_geo, rem_geo = self._get_req_rem_geo(ds_info)
- desired, other = split_desired_other(fhs, req_geo, rem_geo)
+ prime_geo, second_geo = self._get_primary_secondary_geo_groups(ds_info)
+ desired, other = split_desired_other(fhs, prime_geo, second_geo)
if desired:
try:
- ds_info['dataset_groups'].remove(rem_geo)
+ ds_info['dataset_groups'].remove(second_geo)
except ValueError:
pass
return desired
@@ -573,7 +604,7 @@ def _get_file_handlers(self, dsid):
LOG.warning("Required file type '%s' not found or loaded for "
"'%s'", ds_info['file_type'], dsid['name'])
else:
- if len(set(ds_info['dataset_groups']) & set(['GITCO', 'GIMGO', 'GMTCO', 'GMODO'])) > 1:
+ if len(set(ds_info['dataset_groups']) & {'GITCO', 'GIMGO', 'GMTCO', 'GMODO'}) > 1:
fhs = self.get_right_geo_fhs(dsid, fhs)
return fhs
@@ -587,24 +618,33 @@ def _get_coordinates_for_dataset_key(self, dsid):
for c_id in coords:
c_info = self.all_ids[c_id] # c_info['dataset_groups'] should be a list of 2 elements
self._get_file_handlers(c_id)
- if len(c_info['dataset_groups']) == 1: # filtering already done
- continue
- try:
- req_geo, rem_geo = self._get_req_rem_geo(c_info)
- except ValueError: # DNB
+ prime_geo, second_geo = self._geo_dataset_groups(c_info)
+ if prime_geo is None:
continue
# check the dataset file for the geolocation filename
- geo_filenames = self._load_from_geo_ref(dsid)
- if not geo_filenames:
- c_info['dataset_groups'] = [rem_geo]
- else:
- # concatenate all values
- new_fhs = sum(self.create_filehandlers(geo_filenames).values(), [])
- desired, other = split_desired_other(new_fhs, req_geo, rem_geo)
- if desired:
- c_info['dataset_groups'].remove(rem_geo)
- else:
- c_info['dataset_groups'].remove(req_geo)
+ geo_filenames = self._load_filenames_from_geo_ref(dsid)
+ self._create_new_geo_file_handlers(geo_filenames)
+ self._remove_not_loaded_geo_dataset_group(c_info['dataset_groups'], prime_geo, second_geo)
return coords
+
+ def _geo_dataset_groups(self, c_info):
+ if len(c_info['dataset_groups']) == 1: # filtering already done
+ return None, None
+ try:
+ prime_geo, second_geo = self._get_primary_secondary_geo_groups(c_info)
+ return prime_geo, second_geo
+ except ValueError: # DNB
+ return None, None
+
+ def _create_new_geo_file_handlers(self, geo_filenames):
+ existing_filenames = set([fh.filename for fh in self.file_handlers['generic_file']])
+ geo_filenames = set(geo_filenames) - existing_filenames
+ self.create_filehandlers(geo_filenames)
+
+ def _remove_not_loaded_geo_dataset_group(self, c_dataset_groups, prime_geo, second_geo):
+ all_fhs = self.file_handlers['generic_file']
+ desired, other = split_desired_other(all_fhs, prime_geo, second_geo)
+ group_to_remove = second_geo if desired else prime_geo
+ c_dataset_groups.remove(group_to_remove)
diff --git a/satpy/readers/virr_l1b.py b/satpy/readers/virr_l1b.py
index a572d245b8..0ffe7251cb 100644
--- a/satpy/readers/virr_l1b.py
+++ b/satpy/readers/virr_l1b.py
@@ -40,12 +40,14 @@
"""
+import logging
from datetime import datetime
-from satpy.readers.hdf5_utils import HDF5FileHandler
-from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
-import numpy as np
+
import dask.array as da
-import logging
+import numpy as np
+from pyspectral.blackbody import blackbody_wn_rad2temp as rad2temp
+
+from satpy.readers.hdf5_utils import HDF5FileHandler
LOG = logging.getLogger(__name__)
@@ -92,37 +94,25 @@ def get_dataset(self, dataset_id, ds_info):
file_key = file_key.replace('Data/', '')
data = self[file_key]
band_index = ds_info.get('band_index')
+ valid_range = data.attrs.pop('valid_range', None)
+ if isinstance(valid_range, np.ndarray):
+ valid_range = valid_range.tolist()
if band_index is not None:
data = data[band_index]
- data = data.where((data >= self[file_key + '/attr/valid_range'][0]) &
- (data <= self[file_key + '/attr/valid_range'][1]))
+ if valid_range:
+ data = data.where((data >= valid_range[0]) &
+ (data <= valid_range[1]))
if 'Emissive' in file_key:
- slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales'].
- data[:, band_index][:, np.newaxis])
- intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis]
- # Converts cm^-1 (wavenumbers) and (mW/m^2)/(str/cm^-1) (radiance data)
- # to SI units m^-1, mW*m^-3*str^-1.
- wave_number = self['/attr/' + self.wave_number][band_index] * 100
- bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5)
- if isinstance(bt_data, np.ndarray):
- # old versions of pyspectral produce numpy arrays
- data.data = da.from_array(bt_data, chunks=data.data.chunks)
- else:
- # new versions of pyspectral can do dask arrays
- data.data = bt_data
+ self._calibrate_emissive(data, band_index)
elif 'RefSB' in file_key:
- if self.platform_id == 'FY3B':
- coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1)
- else:
- coeffs = self['/attr/RefSB_Cal_Coefficients']
- slope = self._correct_slope(coeffs[0::2])
- intercept = coeffs[1::2]
- data = data * slope[band_index] + intercept[band_index]
+ data = self._calibrate_reflective(data, band_index)
else:
slope = self._correct_slope(self[file_key + '/attr/Slope'])
intercept = self[file_key + '/attr/Intercept']
- data = data.where((data >= self[file_key + '/attr/valid_range'][0]) &
- (data <= self[file_key + '/attr/valid_range'][1]))
+
+ if valid_range:
+ data = data.where((data >= valid_range[0]) &
+ (data <= valid_range[1]))
data = data * slope + intercept
new_dims = {old: new for old, new in zip(data.dims, ('y', 'x'))}
data = data.rename(new_dims)
@@ -139,6 +129,31 @@ def get_dataset(self, dataset_id, ds_info):
data.attrs.update({'units': '1'})
return data
+ def _calibrate_reflective(self, data, band_index):
+ if self.platform_id == 'FY3B':
+ coeffs = da.from_array(FY3B_REF_COEFFS, chunks=-1)
+ else:
+ coeffs = self['/attr/RefSB_Cal_Coefficients']
+ slope = self._correct_slope(coeffs[0::2])
+ intercept = coeffs[1::2]
+ data = data * slope[band_index] + intercept[band_index]
+ return data
+
+ def _calibrate_emissive(self, data, band_index):
+ slope = self._correct_slope(self[self.l1b_prefix + 'Emissive_Radiance_Scales'].
+ data[:, band_index][:, np.newaxis])
+ intercept = self[self.l1b_prefix + 'Emissive_Radiance_Offsets'].data[:, band_index][:, np.newaxis]
+        # Converts cm^-1 (wavenumbers) and (mW/m^2)/(sr/cm^-1) (radiance data)
+        # to SI units m^-1, mW*m^-3*sr^-1.
+ wave_number = self['/attr/' + self.wave_number][band_index] * 100
+ bt_data = rad2temp(wave_number, (data.data * slope + intercept) * 1e-5)
+ if isinstance(bt_data, np.ndarray):
+ # old versions of pyspectral produce numpy arrays
+ data.data = da.from_array(bt_data, chunks=data.data.chunks)
+ else:
+ # new versions of pyspectral can do dask arrays
+ data.data = bt_data
+
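
# Stand-alone Planck inversion sketch mirroring the unit handling above
# (wavenumber in m^-1, radiance in SI units; the radiance value is
# hypothetical):
import numpy as np

h, c, k = 6.62607015e-34, 2.99792458e8, 1.380649e-23
c1 = 2 * h * c ** 2  # W m^2
c2 = h * c / k       # m K

def wn_rad2temp(nu, radiance):
    """Brightness temperature from spectral radiance per wavenumber."""
    return c2 * nu / np.log(1 + c1 * nu ** 3 / radiance)

nu = 900.0 * 100.0  # 900 cm^-1 expressed in m^-1
print(wn_rad2temp(nu, 1.0e-3))  # ~289 K
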
def _correct_slope(self, slope):
# 0 slope is invalid. Note: slope can be a scalar or array.
return da.where(slope == 0, 1, slope)
diff --git a/satpy/readers/xmlformat.py b/satpy/readers/xmlformat.py
index 2e0f5bc733..0c46a3595e 100644
--- a/satpy/readers/xmlformat.py
+++ b/satpy/readers/xmlformat.py
@@ -17,11 +17,13 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Reads a format from an xml file to create dtypes and scaling factor arrays."""
+from __future__ import annotations
+
from xml.etree.ElementTree import ElementTree
import numpy as np
-VARIABLES = {}
+VARIABLES: dict[str, str] = {}
TYPEC = {"boolean": ">i1",
"integer2": ">i2",
diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py
index 33e291cdcb..ba1aeadbea 100644
--- a/satpy/readers/yaml_reader.py
+++ b/satpy/readers/yaml_reader.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2016-2019 Satpy developers
+# Copyright (c) 2016-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -23,30 +23,26 @@
import os
import warnings
from abc import ABCMeta, abstractmethod
-from collections import deque, OrderedDict
+from collections import OrderedDict, deque
+from contextlib import suppress
from fnmatch import fnmatch
+from functools import cached_property
from weakref import WeakValueDictionary
+import numpy as np
import xarray as xr
import yaml
-import numpy as np
-
-try:
- from yaml import UnsafeLoader
-except ImportError:
- from yaml import Loader as UnsafeLoader
-
-from pyresample.geometry import StackedAreaDefinition, SwathDefinition
from pyresample.boundary import AreaDefBoundary, Boundary
-from satpy.resample import get_area_def
-from satpy.utils import recursive_dict_update
-from satpy.dataset import DataQuery, DataID, get_key
-from satpy.dataset.dataid import get_keys_from_config, default_id_keys_config, default_co_keys_config
-from satpy.aux_download import DataDownloadMixin
-from satpy import DatasetDict
-from satpy.resample import add_crs_xy_coords
+from pyresample.geometry import AreaDefinition, StackedAreaDefinition, SwathDefinition
from trollsift.parser import globify, parse
-from pyresample.geometry import AreaDefinition
+from yaml import UnsafeLoader
+
+from satpy import DatasetDict
+from satpy.aux_download import DataDownloadMixin
+from satpy.dataset import DataID, DataQuery, get_key
+from satpy.dataset.dataid import default_co_keys_config, default_id_keys_config, get_keys_from_config
+from satpy.resample import add_crs_xy_coords, get_area_def
+from satpy.utils import recursive_dict_update
logger = logging.getLogger(__name__)
@@ -60,10 +56,9 @@ def listify_string(something):
"""
if isinstance(something, str):
return [something]
- elif something is not None:
+ if something is not None:
return list(something)
- else:
- return list()
+ return list()
def _get_filebase(path, pattern):
@@ -221,8 +216,7 @@ def supports_sensor(self, sensor):
if sensor and not (set(self.info.get("sensors")) &
set(listify_string(sensor))):
return False
- else:
- return True
+ return True
def select_files_from_directory(
self, directory=None, fs=None):
@@ -294,26 +288,8 @@ def load_ds_ids_from_config(self):
id_keys = get_keys_from_config(self._id_keys, dataset)
# Build each permutation/product of the dataset
- id_kwargs = []
- for key, idval in id_keys.items():
- val = dataset.get(key, idval.get('default') if idval is not None else None)
- val_type = None
- if idval is not None:
- val_type = idval.get('type')
- if val_type is not None and issubclass(val_type, tuple):
- # special case: wavelength can be [min, nominal, max]
- # but is still considered 1 option
- id_kwargs.append((val,))
- elif isinstance(val, (list, tuple, set)):
- # this key has multiple choices
- # (ex. 250 meter, 500 meter, 1000 meter resolutions)
- id_kwargs.append(val)
- elif isinstance(val, dict):
- id_kwargs.append(val.keys())
- else:
- # this key only has one choice so make it a one
- # item iterable
- id_kwargs.append((val,))
+ id_kwargs = self._build_id_permutations(dataset, id_keys)
+
for id_params in itertools.product(*id_kwargs):
dsid = DataID(id_keys, **dict(zip(id_keys, id_params)))
ids.append(dsid)
@@ -322,13 +298,40 @@ def load_ds_ids_from_config(self):
ds_info = dataset.copy()
for key in dsid.keys():
if isinstance(ds_info.get(key), dict):
- ds_info.update(ds_info[key][dsid.get(key)])
+ with suppress(KeyError):
+ # KeyError is suppressed in case the key does not represent interesting metadata,
+ # eg a custom type
+ ds_info.update(ds_info[key][dsid.get(key)])
# this is important for wavelength which was converted
# to a tuple
ds_info[key] = dsid.get(key)
self.all_ids[dsid] = ds_info
return ids
+ def _build_id_permutations(self, dataset, id_keys):
+ """Build each permutation/product of the dataset."""
+ id_kwargs = []
+ for key, idval in id_keys.items():
+ val = dataset.get(key, idval.get('default') if idval is not None else None)
+ val_type = None
+ if idval is not None:
+ val_type = idval.get('type')
+ if val_type is not None and issubclass(val_type, tuple):
+ # special case: wavelength can be [min, nominal, max]
+ # but is still considered 1 option
+ id_kwargs.append((val,))
+ elif isinstance(val, (list, tuple, set)):
+ # this key has multiple choices
+ # (ex. 250 meter, 500 meter, 1000 meter resolutions)
+ id_kwargs.append(val)
+ elif isinstance(val, dict):
+ id_kwargs.append(val.keys())
+ else:
+ # this key only has one choice so make it a one
+ # item iterable
+ id_kwargs.append((val,))
+ return id_kwargs
+
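
# Sketch of how the permutations built above expand into dataset IDs: each
# key contributes an iterable of choices and itertools.product yields one
# candidate ID per combination (values hypothetical):
import itertools

id_kwargs = [('M05',), (250, 500, 1000), ((0.6, 0.65, 0.7),)]
for name, resolution, wavelength in itertools.product(*id_kwargs):
    print(name, resolution, wavelength)  # three IDs, one per resolution
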
class FileYAMLReader(AbstractYAMLReader, DataDownloadMixin):
"""Primary reader base class that is configured by a YAML file.
@@ -342,6 +345,10 @@ class FileYAMLReader(AbstractYAMLReader, DataDownloadMixin):
"""
+ # WeakValueDictionary objects must be created at the class level or else
+ # dask will not be able to serialize them on a distributed environment
+ _coords_cache: WeakValueDictionary = WeakValueDictionary()
+
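
# Minimal sketch of the WeakValueDictionary behaviour relied on above:
# entries vanish once the cached value is garbage collected, so the cache
# never keeps a swath definition alive on its own.
from weakref import WeakValueDictionary

class Holder:
    """Dummy value; WeakValueDictionary values must be weak-referenceable."""

cache = WeakValueDictionary()
value = Holder()
cache['lons/lats'] = value
print(len(cache))  # 1 while a strong reference exists
del value
print(len(cache))  # 0 after the value is collected (CPython)
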
def __init__(self,
config_dict,
filter_parameters=None,
@@ -354,7 +361,6 @@ def __init__(self,
self.available_ids = {}
self.filter_filenames = self.info.get('filter_filenames', filter_filenames)
self.filter_parameters = filter_parameters or {}
- self.coords_cache = WeakValueDictionary()
self.register_data_files()
@property
@@ -715,6 +721,7 @@ def _load_dataset_data(self, file_handlers, dsid, **kwargs):
# Update the metadata
proj.attrs['start_time'] = file_handlers[0].start_time
proj.attrs['end_time'] = file_handlers[-1].end_time
+ proj.attrs['reader'] = self.name
return proj
def _preferred_filetype(self, filetypes):
@@ -735,34 +742,6 @@ def _load_area_def(self, dsid, file_handlers, **kwargs):
"""Load the area definition of *dsid*."""
return _load_area_def(dsid, file_handlers)
- def _get_coordinates_for_dataset_key(self, dsid):
- """Get the coordinate dataset keys for *dsid*."""
- ds_info = self.all_ids[dsid]
- cids = []
- for cinfo in ds_info.get('coordinates', []):
- if not isinstance(cinfo, dict):
- cinfo = {'name': cinfo}
-
- for key in self._co_keys:
- if key == 'name':
- continue
- if key in ds_info:
- if ds_info[key] is not None:
- cinfo[key] = ds_info[key]
- cid = DataQuery.from_dict(cinfo)
-
- cids.append(self.get_dataset_key(cid))
-
- return cids
-
- def _get_coordinates_for_dataset_keys(self, dsids):
- """Get all coordinates."""
- coordinates = {}
- for dsid in dsids:
- cids = self._get_coordinates_for_dataset_key(dsid)
- coordinates.setdefault(dsid, []).extend(cids)
- return coordinates
-
def _get_file_handlers(self, dsid):
"""Get the file handler to load this dataset."""
ds_info = self.all_ids[dsid]
@@ -774,14 +753,28 @@ def _get_file_handlers(self, dsid):
else:
return self.file_handlers[filetype]
+ def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs):
+ """Get the area for *dsid*."""
+ try:
+ return self._load_area_def(dsid, file_handlers, **kwargs)
+ except NotImplementedError:
+ if any(x is None for x in coords):
+ logger.warning(
+ "Failed to load coordinates for '{}'".format(dsid))
+ return None
+
+ area = self._make_area_from_coords(coords)
+ if area is None:
+ logger.debug("No coordinates found for %s", str(dsid))
+ return area
+
def _make_area_from_coords(self, coords):
"""Create an appropriate area with the given *coords*."""
if len(coords) == 2:
lons, lats = self._get_lons_lats_from_coords(coords)
-
sdef = self._make_swath_definition_from_lons_lats(lons, lats)
return sdef
- elif len(coords) != 0:
+ if len(coords) != 0:
raise NameError("Don't know what to do with coordinates " + str(
coords))
@@ -802,7 +795,7 @@ def _make_swath_definition_from_lons_lats(self, lons, lats):
key = None
try:
key = (lons.data.name, lats.data.name)
- sdef = self.coords_cache.get(key)
+ sdef = FileYAMLReader._coords_cache.get(key)
except AttributeError:
sdef = None
if sdef is None:
@@ -813,24 +806,9 @@ def _make_swath_definition_from_lons_lats(self, lons, lats):
lons.attrs.get('name', lons.name),
lats.attrs.get('name', lats.name))
if key is not None:
- self.coords_cache[key] = sdef
+ FileYAMLReader._coords_cache[key] = sdef
return sdef
- def _load_dataset_area(self, dsid, file_handlers, coords, **kwargs):
- """Get the area for *dsid*."""
- try:
- return self._load_area_def(dsid, file_handlers, **kwargs)
- except NotImplementedError:
- if any(x is None for x in coords):
- logger.warning(
- "Failed to load coordinates for '{}'".format(dsid))
- return None
-
- area = self._make_area_from_coords(coords)
- if area is None:
- logger.debug("No coordinates found for %s", str(dsid))
- return area
-
def _load_dataset_with_area(self, dsid, coords, **kwargs):
"""Load *dsid* and its area if available."""
file_handlers = self._get_file_handlers(dsid)
@@ -864,20 +842,7 @@ def _assign_coords_from_dataarray(coords, ds):
def _load_ancillary_variables(self, datasets, **kwargs):
"""Load the ancillary variables of `datasets`."""
- all_av_ids = set()
- for dataset in datasets.values():
- ancillary_variables = dataset.attrs.get('ancillary_variables', [])
- if not isinstance(ancillary_variables, (list, tuple, set)):
- ancillary_variables = ancillary_variables.split(' ')
- av_ids = []
- for key in ancillary_variables:
- try:
- av_ids.append(self.get_dataset_key(key))
- except KeyError:
- logger.warning("Can't load ancillary dataset %s", str(key))
-
- all_av_ids |= set(av_ids)
- dataset.attrs['ancillary_variables'] = av_ids
+ all_av_ids = self._gather_ancillary_variables_ids(datasets)
loadable_av_ids = [av_id for av_id in all_av_ids if av_id not in datasets]
if not all_av_ids:
return
@@ -893,6 +858,27 @@ def _load_ancillary_variables(self, datasets, **kwargs):
new_vars.append(av_id)
dataset.attrs['ancillary_variables'] = new_vars
+ def _gather_ancillary_variables_ids(self, datasets):
+ """Gather ancillary variables' ids.
+
+ This adds/modifies the dataset's `ancillary_variables` attr.
+ """
+ all_av_ids = set()
+ for dataset in datasets.values():
+ ancillary_variables = dataset.attrs.get('ancillary_variables', [])
+ if not isinstance(ancillary_variables, (list, tuple, set)):
+ ancillary_variables = ancillary_variables.split(' ')
+ av_ids = []
+ for key in ancillary_variables:
+ try:
+ av_ids.append(self.get_dataset_key(key))
+ except KeyError:
+ logger.warning("Can't load ancillary dataset %s", str(key))
+
+ all_av_ids |= set(av_ids)
+ dataset.attrs['ancillary_variables'] = av_ids
+ return all_av_ids
+
def get_dataset_key(self, key, available_only=False, **kwargs):
"""Get the fully qualified `DataID` matching `key`.
@@ -951,6 +937,34 @@ def load(self, dataset_keys, previous_datasets=None, **kwargs):
return datasets
+ def _get_coordinates_for_dataset_keys(self, dsids):
+ """Get all coordinates."""
+ coordinates = {}
+ for dsid in dsids:
+ cids = self._get_coordinates_for_dataset_key(dsid)
+ coordinates.setdefault(dsid, []).extend(cids)
+ return coordinates
+
+ def _get_coordinates_for_dataset_key(self, dsid):
+ """Get the coordinate dataset keys for *dsid*."""
+ ds_info = self.all_ids[dsid]
+ cids = []
+ for cinfo in ds_info.get('coordinates', []):
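+ # coordinate entries may be given as plain names; normalize them to dict form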
+ if not isinstance(cinfo, dict):
+ cinfo = {'name': cinfo}
+
+ for key in self._co_keys:
+ if key == 'name':
+ continue
+ if key in ds_info:
+ if ds_info[key] is not None:
+ cinfo[key] = ds_info[key]
+ cid = DataQuery.from_dict(cinfo)
+
+ cids.append(self.get_dataset_key(cid))
+
+ return cids
+
def _load_area_def(dsid, file_handlers):
"""Load the area definition of *dsid*."""
@@ -992,6 +1006,11 @@ def _set_orientation(dataset, upper_right_corner):
"and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name')))
return dataset
+ if isinstance(dataset.attrs['area'], SwathDefinition):
+ logger.info("Dataset {} is in a SwathDefinition "
+ "and will not be flipped.".format(dataset.attrs.get('name', 'unknown_name')))
+ return dataset
+
projection_type = _get_projection_type(dataset.attrs['area'])
accepted_geos_proj_types = ['Geostationary Satellite (Sweep Y)', 'Geostationary Satellite (Sweep X)']
if projection_type not in accepted_geos_proj_types:
@@ -1150,8 +1169,7 @@ def create_filehandlers(self, filenames, fh_kwargs=None):
fh.filename_info['segment'] = fh.filename_info.get('count_in_repeat_cycle', 1)
return created_fhs
- @staticmethod
- def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True):
+ def _load_dataset(self, dsid, ds_info, file_handlers, dim='y', pad_data=True):
"""Load only a piece of the dataset."""
if not pad_data:
return FileYAMLReader._load_dataset(dsid, ds_info,
@@ -1164,28 +1182,14 @@ def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True):
raise KeyError(
"Could not load {} from any provided files".format(dsid))
- padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi'
-
- empty_segment = xr.full_like(projectable, np.nan)
+ filetype = file_handlers[0].filetype_info['file_type']
+ self.empty_segment = xr.full_like(projectable, np.nan)
for i, sli in enumerate(slice_list):
if sli is None:
- if padding_fci_scene:
- slice_list[i] = _get_empty_segment_with_height(empty_segment,
- _get_FCI_L1c_FDHSI_chunk_height(
- empty_segment.shape[1], i + 1),
- dim=dim)
- else:
- slice_list[i] = empty_segment
+ slice_list[i] = self._get_empty_segment(dim=dim, idx=i, filetype=filetype)
while expected_segments > counter:
- if padding_fci_scene:
- slice_list.append(_get_empty_segment_with_height(empty_segment,
- _get_FCI_L1c_FDHSI_chunk_height(
- empty_segment.shape[1], counter + 1),
- dim=dim))
- else:
- slice_list.append(empty_segment)
-
+ slice_list.append(self._get_empty_segment(dim=dim, idx=counter, filetype=filetype))
counter += 1
if dim not in slice_list[0].dims:
@@ -1198,97 +1202,114 @@ def _load_dataset(dsid, ds_info, file_handlers, dim='y', pad_data=True):
res.attrs = combined_info
return res
+ def _get_empty_segment(self, **kwargs):
+ """Return the empty segment used to pad missing segments."""
+ return self.empty_segment
+
def _load_area_def(self, dsid, file_handlers, pad_data=True):
"""Load the area definition of *dsid* with padding."""
if not pad_data:
return _load_area_def(dsid, file_handlers)
- return _load_area_def_with_padding(dsid, file_handlers)
+ return self._load_area_def_with_padding(dsid, file_handlers)
+ def _load_area_def_with_padding(self, dsid, file_handlers):
+ """Load the area definition of *dsid* with padding."""
+ # Pad missing segments between the first available and expected
+ area_defs = self._pad_later_segments_area(file_handlers, dsid)
-def _load_area_def_with_padding(dsid, file_handlers):
- """Load the area definition of *dsid* with padding."""
- # Pad missing segments between the first available and expected
- area_defs = _pad_later_segments_area(file_handlers, dsid)
+ # Add missing start segments
+ area_defs = self._pad_earlier_segments_area(file_handlers, dsid, area_defs)
- # Add missing start segments
- area_defs = _pad_earlier_segments_area(file_handlers, dsid, area_defs)
+ # Stack the area definitions
+ area_def = _stack_area_defs(area_defs)
- # Stack the area definitions
- area_def = _stack_area_defs(area_defs)
+ return area_def
- return area_def
+ def _pad_later_segments_area(self, file_handlers, dsid):
+ """Pad area definitions for missing segments that are later in sequence than the first available."""
+ expected_segments = file_handlers[0].filetype_info['expected_segments']
+ filetype = file_handlers[0].filetype_info['file_type']
+ available_segments = [int(fh.filename_info.get('segment', 1)) for
+ fh in file_handlers]
+ area_defs = self._get_segments_areadef_with_later_padded(file_handlers, filetype, dsid, available_segments,
+ expected_segments)
-def _stack_area_defs(area_def_dict):
- """Stack given dict of area definitions and return a StackedAreaDefinition."""
- area_defs = [area_def_dict[area_def] for
- area_def in sorted(area_def_dict.keys())
- if area_def is not None]
+ return area_defs
- area_def = StackedAreaDefinition(*area_defs)
- area_def = area_def.squeeze()
+ def _get_segments_areadef_with_later_padded(self, file_handlers, filetype, dsid, available_segments,
+ expected_segments):
+ seg_size = None
+ area_defs = {}
+ for segment in range(available_segments[0], expected_segments + 1):
+ try:
+ idx = available_segments.index(segment)
+ fh = file_handlers[idx]
+ area = fh.get_area_def(dsid)
+ except ValueError:
+ area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='later')
+
+ area_defs[segment] = area
+ seg_size = area.shape
+ return area_defs
+
+ def _pad_earlier_segments_area(self, file_handlers, dsid, area_defs):
+ """Pad area definitions for missing segments that are earlier in sequence than the first available."""
+ available_segments = [int(fh.filename_info.get('segment', 1)) for
+ fh in file_handlers]
+ area = file_handlers[0].get_area_def(dsid)
+ seg_size = area.shape
+ filetype = file_handlers[0].filetype_info['file_type']
- return area_def
+ for segment in range(available_segments[0] - 1, 0, -1):
+ area = self._get_new_areadef_for_padded_segment(area, filetype, seg_size, segment, padding_type='earlier')
+ area_defs[segment] = area
+ seg_size = area.shape
+ return area_defs
-def _pad_later_segments_area(file_handlers, dsid):
- """Pad area definitions for missing segments that are later in sequence than the first available."""
- seg_size = None
- expected_segments = file_handlers[0].filetype_info['expected_segments']
- available_segments = [int(fh.filename_info.get('segment', 1)) for
- fh in file_handlers]
- area_defs = {}
- padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi'
+ def _get_new_areadef_for_padded_segment(self, area, filetype, seg_size, segment, padding_type):
+ logger.debug("Padding to full disk with segment nr. %d", segment)
+ new_height_px, new_ll_y, new_ur_y = self._get_y_area_extents_for_padded_segment(area, filetype, padding_type,
+ seg_size, segment)
- for segment in range(available_segments[0], expected_segments + 1):
- try:
- idx = available_segments.index(segment)
- fh = file_handlers[idx]
- area = fh.get_area_def(dsid)
- except ValueError:
- logger.debug("Padding to full disk with segment nr. %d", segment)
-
- new_height_proj_coord, new_height_px = _get_new_areadef_heights(area, seg_size, segment,
- padding_fci_scene)
+ fill_extent = (area.area_extent[0], new_ll_y,
+ area.area_extent[2], new_ur_y)
+ area = AreaDefinition('fill', 'fill', 'fill', area.crs,
+ seg_size[1], new_height_px,
+ fill_extent)
+ return area
+
+ def _get_y_area_extents_for_padded_segment(self, area, filetype, padding_type, seg_size, segment):
+ new_height_proj_coord, new_height_px = self._get_new_areadef_heights(area, seg_size,
+ segment_n=segment,
+ filetype=filetype)
+ if padding_type == 'later':
new_ll_y = area.area_extent[1] + new_height_proj_coord
new_ur_y = area.area_extent[1]
- fill_extent = (area.area_extent[0], new_ll_y,
- area.area_extent[2], new_ur_y)
- area = AreaDefinition('fill', 'fill', 'fill', area.crs,
- seg_size[1], new_height_px,
- fill_extent)
-
- area_defs[segment] = area
- seg_size = area.shape
+ elif padding_type == 'earlier':
+ new_ll_y = area.area_extent[3]
+ new_ur_y = area.area_extent[3] - new_height_proj_coord
+ else:
+ raise ValueError("Padding type not recognised.")
+ return new_height_px, new_ll_y, new_ur_y
- return area_defs
+ def _get_new_areadef_heights(self, previous_area, previous_seg_size, **kwargs):
+ """Get the area definition heights in projection coordinates and pixels for the new padded segment."""
+ # fixed-height formats reuse the previous segment heights
+ new_height_px = previous_seg_size[0]
+ new_height_proj_coord = previous_area.area_extent[1] - previous_area.area_extent[3]
+ return new_height_proj_coord, new_height_px
-def _pad_earlier_segments_area(file_handlers, dsid, area_defs):
- """Pad area definitions for missing segments that are earlier in sequence than the first available."""
- available_segments = [int(fh.filename_info.get('segment', 1)) for
- fh in file_handlers]
- area = file_handlers[0].get_area_def(dsid)
- seg_size = area.shape
- padding_fci_scene = file_handlers[0].filetype_info.get('file_type') == 'fci_l1c_fdhsi'
- for segment in range(available_segments[0] - 1, 0, -1):
- logger.debug("Padding segment %d to full disk.",
- segment)
+def _stack_area_defs(area_def_dict):
+ """Stack given dict of area definitions and return a StackedAreaDefinition."""
+ area_defs = [area_def_dict[area_def] for
+ area_def in sorted(area_def_dict.keys())
+ if area_def is not None]
- new_height_proj_coord, new_height_px = _get_new_areadef_heights(area, seg_size, segment, padding_fci_scene)
- new_ll_y = area.area_extent[3]
- new_ur_y = area.area_extent[3] - new_height_proj_coord
- fill_extent = (area.area_extent[0], new_ll_y,
- area.area_extent[2], new_ur_y)
- area = AreaDefinition('fill', 'fill', 'fill',
- area.crs,
- seg_size[1], new_height_px,
- fill_extent)
- area_defs[segment] = area
- seg_size = area.shape
+ area_def = StackedAreaDefinition(*area_defs)
+ area_def = area_def.squeeze()
- return area_defs
+ return area_def
def _find_missing_segments(file_handlers, ds_info, dsid):
@@ -1325,49 +1346,183 @@ def _find_missing_segments(file_handlers, ds_info, dsid):
return counter, expected_segments, slice_list, failure, projectable
-def _get_new_areadef_heights(previous_area, previous_seg_size, segment_n, padding_fci_scene):
- """Get the area definition heights in projection coordinates and pixels for the new padded segment."""
- if padding_fci_scene:
- # retrieve the chunk/segment pixel height
- new_height_px = _get_FCI_L1c_FDHSI_chunk_height(previous_seg_size[1], segment_n)
- # scale the previous vertical area extent using the new pixel height
- new_height_proj_coord = (previous_area.area_extent[1] - previous_area.area_extent[3]) * new_height_px / \
- previous_seg_size[0]
- else:
- # all other cases have constant segment size, so reuse the previous segment heights
- new_height_px = previous_seg_size[0]
- new_height_proj_coord = previous_area.area_extent[1] - previous_area.area_extent[3]
- return new_height_proj_coord, new_height_px
-
-
def _get_empty_segment_with_height(empty_segment, new_height, dim):
"""Get a new empty segment with the specified height."""
if empty_segment.shape[0] > new_height:
# if current empty segment is too tall, slice the DataArray
return empty_segment[:new_height, :]
- elif empty_segment.shape[0] < new_height:
+ if empty_segment.shape[0] < new_height:
# if current empty segment is too short, concatenate a slice of the DataArray
return xr.concat([empty_segment, empty_segment[:new_height - empty_segment.shape[0], :]], dim=dim)
- else:
- return empty_segment
+ return empty_segment
-def _get_FCI_L1c_FDHSI_chunk_height(chunk_width, chunk_n):
- """Get the height in pixels of a FCI L1c FDHSI chunk given the chunk width and number (starting from 1)."""
- if chunk_width == 11136:
- # 1km resolution case
- if chunk_n in [3, 5, 8, 10, 13, 15, 18, 20, 23, 25, 28, 30, 33, 35, 38, 40]:
- chunk_height = 279
- else:
- chunk_height = 278
- elif chunk_width == 5568:
- # 2km resolution case
- if chunk_n in [5, 10, 15, 20, 25, 30, 35, 40]:
- chunk_height = 140
- else:
- chunk_height = 139
- else:
- raise ValueError("FCI L1c FDHSI chunk width {} not recognized. Must be either 5568 or 11136.".format(
- chunk_width))
+class GEOVariableSegmentYAMLReader(GEOSegmentYAMLReader):
+ """GEOVariableSegmentYAMLReader for handling chunked/segmented GEO products with segments of variable height.
+
+ This YAMLReader overrides parts of the GEOSegmentYAMLReader to account for formats where the segments can
+ have variable heights. It computes the sizes of the padded segments using the information available in the
+ file(handlers), so that gaps of any size can be filled as needed.
+
+ This implementation was motivated by the FCI L1c format, where the segments (called chunks in the FCI world)
+ can have variable heights. It is however generic, so that any future reader can use it. The requirement
+ for the reader is to have a method called `get_segment_position_info`, returning a dictionary containing
+ the positioning info for each chunk (see example in
+ :func:`satpy.readers.fci_l1c_nc.FCIL1cNCFileHandler.get_segment_position_info`).
+
+ For more information, please see the documentation of :class:`satpy.readers.yaml_reader.GEOSegmentYAMLReader`.
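+
+ As an illustration, the returned dictionary could look like the following
+ (the row numbers and heights are made-up example values; only the keys and
+ grid types are prescribed by this reader)::
+
+ {'1km': {'start_position_row': 1, 'end_position_row': 278,
+ 'segment_height': 278, 'segment_width': 11136},
+ '2km': {'start_position_row': 1, 'end_position_row': 139,
+ 'segment_height': 139, 'segment_width': 5568}}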
+ """
+
+ def create_filehandlers(self, filenames, fh_kwargs=None):
+ """Create file handler objects and collect the location information."""
+ created_fhs = super().create_filehandlers(filenames, fh_kwargs=fh_kwargs)
+ self._extract_segment_location_dicts(created_fhs)
+ return created_fhs
+
+ def _extract_segment_location_dicts(self, created_fhs):
+ self.segment_infos = dict()
+ for filetype, filetype_fhs in created_fhs.items():
+ self._initialise_segment_infos(filetype, filetype_fhs)
+ self._collect_segment_position_infos(filetype, filetype_fhs)
+
+ def _collect_segment_position_infos(self, filetype, filetype_fhs):
+ # collect the segment positioning infos for all available segments
+ for fh in filetype_fhs:
+ chk_infos = fh.get_segment_position_info()
+ chk_infos.update({'segment_nr': fh.filename_info['segment'] - 1})
+ self.segment_infos[filetype]['available_segment_infos'].append(chk_infos)
+
+ def _initialise_segment_infos(self, filetype, filetype_fhs):
+ # initialise the segment info for this filetype
+ exp_segment_nr = filetype_fhs[0].filetype_info['expected_segments']
+ width_to_grid_type = _get_width_to_grid_type(filetype_fhs[0].get_segment_position_info())
+ self.segment_infos.update({filetype: {'available_segment_infos': [],
+ 'expected_segments': exp_segment_nr,
+ 'width_to_grid_type': width_to_grid_type}})
+
+ def _get_empty_segment(self, dim=None, idx=None, filetype=None):
+ grid_type = self.segment_infos[filetype]['width_to_grid_type'][self.empty_segment.shape[1]]
+ segment_height = self.segment_heights[filetype][grid_type][idx]
+ return _get_empty_segment_with_height(self.empty_segment, segment_height, dim=dim)
+
+ @cached_property
+ def segment_heights(self):
+ """Compute optimal padded segment heights (in number of pixels) based on the location of available segments."""
+ segment_heights = dict()
+ for filetype, filetype_seginfos in self.segment_infos.items():
+ filetype_seg_heights = {'1km': _compute_optimal_missing_segment_heights(filetype_seginfos, '1km', 11136),
+ '2km': _compute_optimal_missing_segment_heights(filetype_seginfos, '2km', 5568)}
+ segment_heights.update({filetype: filetype_seg_heights})
+ return segment_heights
+
+ def _get_new_areadef_heights(self, previous_area, previous_seg_size, segment_n=None, filetype=None):
+ # retrieve the segment height in number of pixels
+ grid_type = self.segment_infos[filetype]['width_to_grid_type'][previous_seg_size[1]]
+ new_height_px = self.segment_heights[filetype][grid_type][segment_n - 1]
+ # scale the previous vertical area extent using the new pixel height
+ prev_area_extent = previous_area.area_extent[1] - previous_area.area_extent[3]
+ new_height_proj_coord = prev_area_extent * new_height_px / previous_seg_size[0]
- return chunk_height
+ return new_height_proj_coord, new_height_px
+
+
+def _get_width_to_grid_type(seg_info):
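+ # e.g. {'1km': {'segment_width': 11136, ...}} becomes {11136: '1km'}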
+ width_to_grid_type = dict()
+ for grid_type, grid_type_seg_info in seg_info.items():
+ width_to_grid_type.update({grid_type_seg_info['segment_width']: grid_type})
+ return width_to_grid_type
+
+
+def _compute_optimal_missing_segment_heights(seg_infos, grid_type, expected_vertical_size):
+ # initialise positioning arrays
+ segment_start_rows, segment_end_rows, segment_heights = _init_positioning_arrays_for_variable_padding(
+ seg_infos['available_segment_infos'], grid_type, seg_infos['expected_segments'])
+
+ # populate start row of first segment and end row of last segment with known values
+ segment_start_rows[0] = 1
+ segment_end_rows[seg_infos['expected_segments'] - 1] = expected_vertical_size
+
+ # find missing segments and group contiguous missing segments together
+ missing_segments = np.where(segment_heights == 0)[0]
+ groups_missing_segments = np.split(missing_segments, np.where(np.diff(missing_segments) > 1)[0] + 1)
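+ # e.g. missing segments [2, 3, 7] are grouped as [[2, 3], [7]]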
+
+ for group in groups_missing_segments:
+ _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group)
+
+ return segment_heights.astype('int')
+
+
+def _compute_positioning_data_for_missing_group(segment_start_rows, segment_end_rows, segment_heights, group):
+ _populate_group_start_end_row_using_neighbour_segments(group, segment_end_rows, segment_start_rows)
+ proposed_sizes_missing_segments = _compute_proposed_sizes_of_missing_segments_in_group(group, segment_end_rows,
+ segment_start_rows)
+ _populate_start_end_rows_of_missing_segments_with_proposed_sizes(group, proposed_sizes_missing_segments,
+ segment_start_rows, segment_end_rows,
+ segment_heights)
+
+
+def _populate_start_end_rows_of_missing_segments_with_proposed_sizes(group, proposed_sizes_missing_segments,
+ segment_start_rows, segment_end_rows,
+ segment_heights):
+ for n in range(len(group)):
+ # start of first and end of last missing segment have been populated already
+ if n != 0:
+ segment_start_rows[group[n]] = segment_start_rows[group[n - 1]] + proposed_sizes_missing_segments[n] + 1
+ if n != len(group) - 1:
+ segment_end_rows[group[n]] = segment_start_rows[group[n]] + proposed_sizes_missing_segments[n]
+ segment_heights[group[n]] = proposed_sizes_missing_segments[n]
+
+
+def _compute_proposed_sizes_of_missing_segments_in_group(group, segment_end_rows, segment_start_rows):
+ size_group_gap = segment_end_rows[group[-1]] - segment_start_rows[group[0]] + 1
+ proposed_sizes_missing_segments = split_integer_in_most_equal_parts(size_group_gap, len(group))
+ return proposed_sizes_missing_segments
+
+
+def _populate_group_start_end_row_using_neighbour_segments(group, segment_end_rows, segment_start_rows):
+ # if group is at the start/end of the full-disk, we know the start/end value already
+ if segment_start_rows[group[0]] == 0:
+ _populate_group_start_row_using_previous_segment(group, segment_end_rows, segment_start_rows)
+ if segment_end_rows[group[-1]] == 0:
+ _populate_group_end_row_using_later_segment(group, segment_end_rows, segment_start_rows)
+
+
+def _populate_group_end_row_using_later_segment(group, segment_end_rows, segment_start_rows):
+ segment_end_rows[group[-1]] = segment_start_rows[group[-1] + 1] - 1
+
+
+def _populate_group_start_row_using_previous_segment(group, segment_end_rows, segment_start_rows):
+ segment_start_rows[group[0]] = segment_end_rows[group[0] - 1] + 1
+
+
+def _init_positioning_arrays_for_variable_padding(chk_infos, grid_type, exp_segment_nr):
+ segment_heights = np.zeros(exp_segment_nr)
+ segment_start_rows = np.zeros(exp_segment_nr)
+ segment_end_rows = np.zeros(exp_segment_nr)
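+ # entries left at 0 mark segments that are missing from the provided files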
+
+ _populate_positioning_arrays_with_available_chunk_info(chk_infos, grid_type, segment_start_rows, segment_end_rows,
+ segment_heights)
+ return segment_start_rows, segment_end_rows, segment_heights
+
+
+def _populate_positioning_arrays_with_available_chunk_info(chk_infos, grid_type, segment_start_rows, segment_end_rows,
+ segment_heights):
+ for chk_info in chk_infos:
+ current_fh_segment_nr = chk_info['segment_nr']
+ segment_heights[current_fh_segment_nr] = chk_info[grid_type]['segment_height']
+ segment_start_rows[current_fh_segment_nr] = chk_info[grid_type]['start_position_row']
+ segment_end_rows[current_fh_segment_nr] = chk_info[grid_type]['end_position_row']
+
+
+def split_integer_in_most_equal_parts(x, n):
+ """Split an integer number x in n parts that are as equally-sizes as possible."""
+ if x % n == 0:
+ return np.repeat(x // n, n).astype('int')
+ # spread the remainder over the last `remainder` parts
+ remainder = int(x % n)
+ mod = int(x // n)
+ ar = np.repeat(mod, n)
+ ar[-remainder:] = mod + 1
+ return ar.astype('int')
diff --git a/satpy/resample.py b/satpy/resample.py
index 6c4881ed94..a8230ae8ed 100644
--- a/satpy/resample.py
+++ b/satpy/resample.py
@@ -119,33 +119,42 @@
Store area definitions
----------------------
-Area definitions can be added to a custom YAML file (see
-`pyresample's documentation `_
-for more information)
-and loaded using pyresample's utility methods::
+Area definitions can be saved to a custom YAML file (see
+`pyresample's writing to disk `_)
+and loaded using pyresample's utility methods
+(`pyresample's loading from disk `_)::
- >>> from pyresample.utils import parse_area_file
- >>> my_area = parse_area_file('my_areas.yaml', 'my_area')[0]
+ >>> from pyresample import load_area
+ >>> my_area = load_area('my_areas.yaml', 'my_area')
-Examples coming soon...
+Or using :func:`satpy.resample.get_area_def`, which will search through all
+``areas.yaml`` files in your ``SATPY_CONFIG_PATH``::
+
+ >>> from satpy.resample import get_area_def
+ >>> area_eurol = get_area_def("eurol")
+
+For examples of area definitions, see the file ``etc/areas.yaml`` that is
+included with Satpy, where all the area definitions shipped with Satpy are
+defined.
"""
import hashlib
import json
import os
+import warnings
from logging import getLogger
from weakref import WeakValueDictionary
-import warnings
-import numpy as np
-import xarray as xr
+
import dask
import dask.array as da
-import zarr
+import numpy as np
import pyresample
+import xarray as xr
+import zarr
from packaging import version
-
from pyresample.ewa import fornav, ll2cr
from pyresample.geometry import SwathDefinition
+
try:
from pyresample.resampler import BaseResampler as PRBaseResampler
except ImportError:
@@ -162,7 +171,6 @@
from satpy import CHUNK_SIZE
from satpy._config import config_search_paths, get_config_path
-
LOG = getLogger(__name__)
CACHE_SIZE = 10
@@ -177,7 +185,7 @@
'out_coords_x': ('x2', ),
'out_coords_y': ('y2', )}
-resamplers_cache = WeakValueDictionary()
+resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary()
PR_USE_SKIPNA = version.parse(pyresample.__version__) > version.parse("1.17.0")
@@ -185,7 +193,7 @@
def hash_dict(the_dict, the_hash=None):
"""Calculate a hash for a dictionary."""
if the_hash is None:
- the_hash = hashlib.sha1()
+ the_hash = hashlib.sha1() # nosec
the_hash.update(json.dumps(the_dict, sort_keys=True).encode('utf-8'))
return the_hash
@@ -205,7 +213,7 @@ def get_area_file():
def get_area_def(area_name):
"""Get the definition of *area_name* from file.
- The file is defined to use is to be placed in the $PPP_CONFIG_DIR
+ The file to use is to be placed in the $SATPY_CONFIG_PATH
directory, and its name is defined in satpy's configuration file.
"""
try:
@@ -234,14 +242,12 @@ def add_xy_coords(data_arr, area, crs=None):
if 'x' in data_arr.coords and 'y' in data_arr.coords:
# x/y coords already provided
return data_arr
- elif 'x' not in data_arr.dims or 'y' not in data_arr.dims:
+ if 'x' not in data_arr.dims or 'y' not in data_arr.dims:
# no defined x and y dimensions
return data_arr
-
- if hasattr(area, 'get_proj_vectors'):
- x, y = area.get_proj_vectors()
- else:
+ if not hasattr(area, 'get_proj_vectors'):
return data_arr
+ x, y = area.get_proj_vectors()
# convert to DataArrays
y_attrs = {}
@@ -432,12 +438,10 @@ def resample(self, data, cache_dir=None, mask_area=None, **kwargs):
cache_id = self.precompute(cache_dir=cache_dir, **kwargs)
return self.compute(data, cache_id=cache_id, **kwargs)
- def _create_cache_filename(self, cache_dir=None, prefix='',
+ def _create_cache_filename(self, cache_dir, prefix='',
fmt='.zarr', **kwargs):
"""Create filename for the cached resampling parameters."""
- cache_dir = cache_dir or '.'
hash_str = self.get_hash(**kwargs)
-
return os.path.join(cache_dir, prefix + hash_str + fmt)
@@ -487,18 +491,7 @@ def precompute(self, mask=None, radius_of_influence=None, epsilon=0,
cache_dir = None
if radius_of_influence is None and not hasattr(self.source_geo_def, 'geocentric_resolution'):
- warnings.warn("Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.")
- try:
- radius_of_influence = self.source_geo_def.lons.resolution * 3
- except AttributeError:
- try:
- radius_of_influence = max(abs(self.source_geo_def.pixel_size_x),
- abs(self.source_geo_def.pixel_size_y)) * 3
- except AttributeError:
- radius_of_influence = 1000
-
- except TypeError:
- radius_of_influence = 10000
+ radius_of_influence = self._adjust_radius_of_influence(radius_of_influence)
kwargs = dict(source_geo_def=self.source_geo_def,
target_geo_def=self.target_geo_def,
@@ -518,6 +511,22 @@ def precompute(self, mask=None, radius_of_influence=None, epsilon=0,
self.resampler.get_neighbour_info(mask=mask)
self.save_neighbour_info(cache_dir, mask=mask, **kwargs)
+ def _adjust_radius_of_influence(self, radius_of_influence):
+ """Adjust radius of influence."""
+ warnings.warn("Upgrade 'pyresample' for a more accurate default 'radius_of_influence'.")
+ try:
+ radius_of_influence = self.source_geo_def.lons.resolution * 3
+ except AttributeError:
+ try:
+ radius_of_influence = max(abs(self.source_geo_def.pixel_size_x),
+ abs(self.source_geo_def.pixel_size_y)) * 3
+ except AttributeError:
+ radius_of_influence = 1000
+
+ except TypeError:
+ radius_of_influence = 10000
+ return radius_of_influence
+
def _apply_cached_index(self, val, idx_name, persist=False):
"""Reassign resampler index attributes."""
if isinstance(val, np.ndarray):
@@ -530,6 +539,8 @@ def _apply_cached_index(self, val, idx_name, persist=False):
def _check_numpy_cache(self, cache_dir, mask=None,
**kwargs):
"""Check if there's Numpy cache file and convert it to zarr."""
+ if cache_dir is None:
+ return
fname_np = self._create_cache_filename(cache_dir,
prefix='resample_lut-',
mask=mask, fmt='.npz',
@@ -558,14 +569,15 @@ def load_neighbour_info(self, cache_dir, mask=None, **kwargs):
cached = {}
self._check_numpy_cache(cache_dir, mask=mask_name, **kwargs)
- filename = self._create_cache_filename(cache_dir, prefix='nn_lut-',
- mask=mask_name, **kwargs)
for idx_name in NN_COORDINATES:
if mask_name in self._index_caches:
cached[idx_name] = self._apply_cached_index(
self._index_caches[mask_name][idx_name], idx_name)
elif cache_dir:
try:
+ filename = self._create_cache_filename(
+ cache_dir, prefix='nn_lut-',
+ mask=mask_name, **kwargs)
fid = zarr.open(filename, 'r')
cache = np.array(fid[idx_name])
if idx_name == 'valid_input_index':
@@ -938,6 +950,17 @@ def _mean(data, y_size, x_size):
return data_mean
+def _repeat_by_factor(data, block_info=None):
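+ # ``block_info`` is provided by dask's ``map_blocks``; the entry under the
+ # key None describes the output block, including its target 'chunk-shape'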
+ if block_info is None:
+ return data
+ out_shape = block_info[None]['chunk-shape']
+ out_data = data
+ for axis, axis_size in enumerate(out_shape):
+ in_size = data.shape[axis]
+ out_data = np.repeat(out_data, int(axis_size / in_size), axis=axis)
+ return out_data
+
+
class NativeResampler(BaseResampler):
"""Expand or reduce input datasets to be the same shape.
@@ -961,7 +984,7 @@ def resample(self, data, cache_dir=None, mask_area=False, **kwargs):
**kwargs)
@staticmethod
- def aggregate(d, y_size, x_size):
+ def _aggregate(d, y_size, x_size):
"""Average every 4 elements (2x2) in a 2D array."""
if d.ndim != 2:
# we can't guarantee what blocks we are getting and how
@@ -979,42 +1002,55 @@ def aggregate(d, y_size, x_size):
new_chunks = (tuple(int(x / y_size) for x in d.chunks[0]),
tuple(int(x / x_size) for x in d.chunks[1]))
- return da.core.map_blocks(_mean, d, y_size, x_size, dtype=d.dtype, chunks=new_chunks)
+ return da.core.map_blocks(_mean, d, y_size, x_size,
+ meta=np.array((), dtype=d.dtype),
+ dtype=d.dtype, chunks=new_chunks)
+
+ @staticmethod
+ def _replicate(d_arr, repeats):
+ """Repeat data pixels by the per-axis factors specified."""
+ # rechunk so new chunks are the same size as old chunks
+ c_size = max(x[0] for x in d_arr.chunks)
+
+ def _calc_chunks(c, c_size):
+ whole_chunks = [c_size] * int(sum(c) // c_size)
+ remaining = sum(c) - sum(whole_chunks)
+ if remaining:
+ whole_chunks += [remaining]
+ return tuple(whole_chunks)
+ new_chunks = [_calc_chunks(x, int(c_size // repeats[axis]))
+ for axis, x in enumerate(d_arr.chunks)]
+ d_arr = d_arr.rechunk(new_chunks)
+
+ repeated_chunks = []
+ for axis, axis_chunks in enumerate(d_arr.chunks):
+ factor = repeats[axis]
+ if not factor.is_integer():
+ raise ValueError("Expand factor must be a whole number")
+ repeated_chunks.append(tuple(x * int(factor) for x in axis_chunks))
+ repeated_chunks = tuple(repeated_chunks)
+ d_arr = d_arr.map_blocks(_repeat_by_factor,
+ meta=np.array((), dtype=d_arr.dtype),
+ dtype=d_arr.dtype,
+ chunks=repeated_chunks)
+ return d_arr
@classmethod
- def expand_reduce(cls, d_arr, repeats):
+ def _expand_reduce(cls, d_arr, repeats):
"""Expand reduce."""
if not isinstance(d_arr, da.Array):
d_arr = da.from_array(d_arr, chunks=CHUNK_SIZE)
if all(x == 1 for x in repeats.values()):
return d_arr
- elif all(x >= 1 for x in repeats.values()):
- # rechunk so new chunks are the same size as old chunks
- c_size = max(x[0] for x in d_arr.chunks)
-
- def _calc_chunks(c, c_size):
- whole_chunks = [c_size] * int(sum(c) // c_size)
- remaining = sum(c) - sum(whole_chunks)
- if remaining:
- whole_chunks += [remaining]
- return tuple(whole_chunks)
- new_chunks = [_calc_chunks(x, int(c_size // repeats[axis]))
- for axis, x in enumerate(d_arr.chunks)]
- d_arr = d_arr.rechunk(new_chunks)
-
- for axis, factor in repeats.items():
- if not factor.is_integer():
- raise ValueError("Expand factor must be a whole number")
- d_arr = da.repeat(d_arr, int(factor), axis=axis)
- return d_arr
- elif all(x <= 1 for x in repeats.values()):
+ if all(x >= 1 for x in repeats.values()):
+ return cls._replicate(d_arr, repeats)
+ if all(x <= 1 for x in repeats.values()):
# reduce
y_size = 1. / repeats[0]
x_size = 1. / repeats[1]
- return cls.aggregate(d_arr, y_size, x_size)
- else:
- raise ValueError("Must either expand or reduce in both "
- "directions")
+ return cls._aggregate(d_arr, y_size, x_size)
+ raise ValueError("Must either expand or reduce in both "
+ "directions")
def compute(self, data, expand=True, **kwargs):
"""Resample data with NativeResampler."""
@@ -1045,7 +1081,7 @@ def compute(self, data, expand=True, **kwargs):
repeats[y_axis] = y_repeats
repeats[x_axis] = x_repeats
- d_arr = self.expand_reduce(data.data, repeats)
+ d_arr = self._expand_reduce(data.data, repeats)
new_data = xr.DataArray(d_arr, dims=data.dims)
return update_resampled_coords(data, new_data, target_geo_def)
@@ -1307,7 +1343,7 @@ def prepare_resampler(source_area, destination_area, resampler=None, **resample_
if isinstance(resampler, (BaseResampler, PRBaseResampler)):
raise ValueError("Trying to create a resampler when one already "
"exists.")
- elif isinstance(resampler, str):
+ if isinstance(resampler, str):
resampler_class = RESAMPLERS.get(resampler, None)
if resampler_class is None:
if resampler == "gradient_search":
diff --git a/satpy/scene.py b/satpy/scene.py
index 4fab319c09..5aab323042 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2010-2017 Satpy developers
+# Copyright (c) 2010-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -16,27 +16,27 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Scene object to hold satellite data."""
+from __future__ import annotations
import logging
import os
import warnings
+from typing import Callable
+
+import numpy as np
+import xarray as xr
+from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition
+from xarray import DataArray
from satpy.composites import IncompatibleAreas
-from satpy.composites.config_loader import CompositorLoader
-from satpy.dataset import (DataQuery, DataID, dataset_walker,
- replace_anc, combine_metadata)
-from satpy.node import MissingDependencies, ReaderNode, CompositorNode, Node
+from satpy.composites.config_loader import load_compositor_configs_for_sensors
+from satpy.dataset import DataID, DataQuery, DatasetDict, combine_metadata, dataset_walker, replace_anc
from satpy.dependency_tree import DependencyTree
+from satpy.node import CompositorNode, MissingDependencies, ReaderNode
from satpy.readers import load_readers
-from satpy.dataset import DatasetDict
-from satpy.resample import (resample_dataset,
- prepare_resampler, get_area_def)
+from satpy.resample import get_area_def, prepare_resampler, resample_dataset
+from satpy.utils import convert_remote_files_to_fsspec, get_storage_options_from_reader_kwargs
from satpy.writers import load_writer
-from pyresample.geometry import AreaDefinition, BaseDefinition, SwathDefinition
-
-import xarray as xr
-from xarray import DataArray
-import numpy as np
LOG = logging.getLogger(__name__)
@@ -73,14 +73,29 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None,
reader_kwargs=None):
"""Initialize Scene with Reader and Compositor objects.
- To load data `filenames` and preferably `reader` must be specified. If `filenames` is provided without `reader`
- then the available readers will be searched for a Reader that can support the provided files. This can take
- a considerable amount of time so it is recommended that `reader` always be provided. Note without `filenames`
- the Scene is created with no Readers available requiring Datasets to be added manually::
+ To load data `filenames` and preferably `reader` must be specified::
+
+ scn = Scene(filenames=glob('/path/to/viirs/sdr/files/*'), reader='viirs_sdr')
+
+
+ If ``filenames`` is provided without ``reader`` then the available readers
+ will be searched for a Reader that can support the provided files. This
+ can take a considerable amount of time so it is recommended that
+ ``reader`` always be provided. Note without ``filenames`` the Scene is
+ created with no Readers available requiring Datasets to be added
+ manually::
scn = Scene()
scn['my_dataset'] = Dataset(my_data_array, **my_info)
+ Further, note that it is also possible to load a combination of files
+ or sets of files, each requiring their specific reader. For that,
+ ``filenames`` needs to be a `dict` (see parameters list below), e.g.::
+
+ scn = Scene(filenames={'nwcsaf-pps_nc': glob('/path/to/nwc/saf/pps/files/*'),
+ 'modis_l1b': glob('/path/to/modis/lvl1/files/*')})
+
+
Args:
filenames (iterable or dict): A sequence of files that will be used to load data from. A ``dict`` object
should map reader names to a list of filenames for that reader.
@@ -93,27 +108,34 @@ def __init__(self, filenames=None, reader=None, filter_parameters=None,
sub-dictionaries to pass different arguments to different
reader instances.
+ Keyword arguments for remote file access are also given in this dictionary.
+ See `documentation `_
+ for usage examples.
+
"""
self.attrs = dict()
+
+ storage_options, cleaned_reader_kwargs = get_storage_options_from_reader_kwargs(reader_kwargs)
+
if filter_parameters:
- if reader_kwargs is None:
- reader_kwargs = {}
+ if cleaned_reader_kwargs is None:
+ cleaned_reader_kwargs = {}
else:
- reader_kwargs = reader_kwargs.copy()
- reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters)
+ cleaned_reader_kwargs = cleaned_reader_kwargs.copy()
+ cleaned_reader_kwargs.setdefault('filter_parameters', {}).update(filter_parameters)
if filenames and isinstance(filenames, str):
raise ValueError("'filenames' must be a list of files: Scene(filenames=[filename])")
+ if filenames:
+ filenames = convert_remote_files_to_fsspec(filenames, storage_options)
+
self._readers = self._create_reader_instances(filenames=filenames,
reader=reader,
- reader_kwargs=reader_kwargs)
- self.attrs.update(self._compute_metadata_from_readers())
+ reader_kwargs=cleaned_reader_kwargs)
self._datasets = DatasetDict()
- self._composite_loader = CompositorLoader()
- comps, mods = self._composite_loader.load_compositors(self.attrs['sensor'])
self._wishlist = set()
- self._dependency_tree = DependencyTree(self._readers, comps, mods)
+ self._dependency_tree = DependencyTree(self._readers)
self._resamplers = {}
@property
@@ -124,31 +146,6 @@ def wishlist(self):
def _ipython_key_completions_(self):
return [x['name'] for x in self._datasets.keys()]
- def _compute_metadata_from_readers(self):
- """Determine pieces of metadata from the readers loaded."""
- mda = {'sensor': self._get_sensor_names()}
-
- # overwrite the request start/end times with actual loaded data limits
- if self._readers:
- mda['start_time'] = min(x.start_time
- for x in self._readers.values())
- mda['end_time'] = max(x.end_time
- for x in self._readers.values())
- return mda
-
- def _get_sensor_names(self):
- """Join the sensors from all loaded readers."""
- # if the user didn't tell us what sensors to work with, let's figure it
- # out
- if not self.attrs.get('sensor'):
- # reader finder could return multiple readers
- return set([sensor for reader_instance in self._readers.values()
- for sensor in reader_instance.sensor_names])
- elif not isinstance(self.attrs['sensor'], (set, tuple, list)):
- return set([self.attrs['sensor']])
- else:
- return set(self.attrs['sensor'])
-
def _create_reader_instances(self,
filenames=None,
reader=None,
@@ -158,15 +155,71 @@ def _create_reader_instances(self,
reader=reader,
reader_kwargs=reader_kwargs)
+ @property
+ def sensor_names(self) -> set[str]:
+ """Return sensor names for the data currently contained in this Scene.
+
+ Sensor information is collected from data contained in the Scene
+ whether loaded from a reader, generated as a composite with
+ :meth:`load`, or added manually (e.g. ``scn["name"] = data_arr``).
+ Sensor information is also collected from any loaded readers.
+ In some rare cases this may mean that the reader includes sensor
+ information for data that isn't actually loaded or even available.
+
+ """
+ contained_sensor_names = self._contained_sensor_names()
+ reader_sensor_names = set([sensor for reader_instance in self._readers.values()
+ for sensor in reader_instance.sensor_names])
+ return contained_sensor_names | reader_sensor_names
+
+ def _contained_sensor_names(self) -> set[str]:
+ sensor_names = set()
+ for data_arr in self.values():
+ if "sensor" not in data_arr.attrs:
+ continue
+ if isinstance(data_arr.attrs["sensor"], str):
+ sensor_names.add(data_arr.attrs["sensor"])
+ elif isinstance(data_arr.attrs["sensor"], set):
+ sensor_names.update(data_arr.attrs["sensor"])
+ else:
+ raise TypeError("Unexpected type in sensor collection")
+ return sensor_names
+
@property
def start_time(self):
- """Return the start time of the file."""
- return self.attrs['start_time']
+ """Return the start time of the contained data.
+
+ If no data is currently contained in the Scene then loaded readers
+ will be consulted.
+
+ """
+ start_times = [data_arr.attrs['start_time'] for data_arr in self.values()
+ if 'start_time' in data_arr.attrs]
+ if not start_times:
+ start_times = self._reader_times('start_time')
+ if not start_times:
+ return None
+ return min(start_times)
@property
def end_time(self):
- """Return the end time of the file."""
- return self.attrs['end_time']
+ """Return the end time of the file.
+
+ If no data is currently contained in the Scene then loaded readers
+ will be consulted. If no readers are loaded then the
+ :attr:`Scene.start_time` is returned.
+
+ """
+ end_times = [data_arr.attrs['end_time'] for data_arr in self.values()
+ if 'end_time' in data_arr.attrs]
+ if not end_times:
+ end_times = self._reader_times('end_time')
+ if not end_times:
+ return self.start_time
+ return max(end_times)
+
+ def _reader_times(self, time_prop_name):
+ return [getattr(reader, time_prop_name) for reader in self._readers.values()]
@property
def missing_datasets(self):
@@ -190,6 +243,48 @@ def _compare_areas(self, datasets=None, compare_func=max):
if datasets is None:
datasets = list(self.values())
+ areas = self._gather_all_areas(datasets)
+
+ if isinstance(areas[0], AreaDefinition):
+ first_crs = areas[0].crs
+ if not all(ad.crs == first_crs for ad in areas[1:]):
+ raise ValueError("Can't compare areas with different "
+ "projections.")
+ return self._compare_area_defs(compare_func, areas)
+ return self._compare_swath_defs(compare_func, areas)
+
+ @staticmethod
+ def _compare_area_defs(compare_func: Callable, area_defs: list[AreaDefinition]) -> list[AreaDefinition]:
+ def _key_func(area_def: AreaDefinition) -> tuple:
+ """Get comparable version of area based on resolution.
+
+ Pixel size x is the primary comparison parameter followed by
+ the y dimension pixel size. The extent of the area and the
+ name (area_id) of the area are also used to act as
+ "tiebreakers" between areas of the same resolution.
+
+ """
+ pixel_size_x_inverse = 1. / abs(area_def.pixel_size_x)
+ pixel_size_y_inverse = 1. / abs(area_def.pixel_size_y)
+ area_id = area_def.area_id
+ return pixel_size_x_inverse, pixel_size_y_inverse, area_def.area_extent, area_id
+ return compare_func(area_defs, key=_key_func)
+
+ @staticmethod
+ def _compare_swath_defs(compare_func: Callable, swath_defs: list[SwathDefinition]) -> list[SwathDefinition]:
+ def _key_func(swath_def: SwathDefinition) -> tuple:
+ attrs = getattr(swath_def.lons, "attrs", {})
+ lon_ds_name = attrs.get("name")
+ rev_shape = swath_def.shape[::-1]
+ return rev_shape + (lon_ds_name,)
+ return compare_func(swath_defs, key=_key_func)
+
+ def _gather_all_areas(self, datasets):
+ """Gather all areas from datasets.
+
+ They have to be of the same type, and at least one dataset should have
+ an area.
+ """
areas = []
for ds in datasets:
if isinstance(ds, BaseDefinition):
@@ -199,28 +294,13 @@ def _compare_areas(self, datasets=None, compare_func=max):
ds = self[ds]
area = ds.attrs.get('area')
areas.append(area)
-
areas = [x for x in areas if x is not None]
if not areas:
raise ValueError("No dataset areas available")
-
if not all(isinstance(x, type(areas[0]))
for x in areas[1:]):
raise ValueError("Can't compare areas of different types")
- elif isinstance(areas[0], AreaDefinition):
- first_crs = areas[0].crs
- if not all(ad.crs == first_crs for ad in areas[1:]):
- raise ValueError("Can't compare areas with different "
- "projections.")
-
- def key_func(ds):
- return 1. / abs(ds.pixel_size_x)
- else:
- def key_func(ds):
- return ds.shape
-
- # find the highest/lowest area among the provided
- return compare_func(areas, key=key_func)
+ return areas
def finest_area(self, datasets=None):
"""Get highest resolution area for the provided datasets.
@@ -237,6 +317,8 @@ def finest_area(self, datasets=None):
def max_area(self, datasets=None):
"""Get highest resolution area for the provided datasets. Deprecated.
+ Deprecated. Use :meth:`finest_area` instead.
+
Args:
datasets (iterable): Datasets whose areas will be compared. Can
be either `xarray.DataArray` objects or
@@ -263,6 +345,8 @@ def coarsest_area(self, datasets=None):
def min_area(self, datasets=None):
"""Get lowest resolution area for the provided datasets. Deprecated.
+ Deprecated. Use :meth:`coarsest_area` instead.
+
Args:
datasets (iterable): Datasets whose areas will be compared. Can
be either `xarray.DataArray` objects or
@@ -286,7 +370,18 @@ def available_dataset_ids(self, reader_name=None, composites=False):
Some readers dynamically determine what is available based on the
contents of the files provided.
- Returns: list of available dataset names
+ By default, only returns non-composite dataset IDs. To include
+ composite dataset IDs, pass ``composites=True``.
+
+ Args:
+ reader_name (str, optional): Name of reader for which to return
+ dataset IDs. If not passed, return dataset IDs for all
+ readers.
+ composites (bool, optional): If True, return dataset IDs including
+ composites. If False (default), return only non-composite
+ dataset IDs.
+
+ Returns: list of available dataset IDs
"""
try:
@@ -305,14 +400,39 @@ def available_dataset_ids(self, reader_name=None, composites=False):
return available_datasets
def available_dataset_names(self, reader_name=None, composites=False):
- """Get the list of the names of the available datasets."""
+ """Get the list of the names of the available datasets.
+
+ By default, this only shows names of datasets directly defined in (one
+ of the) readers. Names of composites are not returned unless the
+ argument ``composites=True`` is passed.
+
+ Args:
+ reader_name (str, optional): Name of reader for which to return
+ dataset IDs. If not passed, return dataset names for all
+ readers.
+ composites (bool, optional): If True, return dataset IDs including
+ composites. If False (default), return only non-composite
+ dataset names.
+
+ Returns: list of available dataset names
+ """
return sorted(set(x['name'] for x in self.available_dataset_ids(
reader_name=reader_name, composites=composites)))
def all_dataset_ids(self, reader_name=None, composites=False):
- """Get names of all datasets from loaded readers or `reader_name` if specified.
+ """Get IDs of all datasets from loaded readers or `reader_name` if specified.
- Returns: list of all dataset names
+ Excludes composites unless ``composites=True`` is passed.
+
+ Args:
+ reader_name (str, optional): Name of reader for which to return
+ dataset IDs. If not passed, return dataset IDs for all
+ readers.
+ composites (bool, optional): If True, return dataset IDs including
+ composites. If False (default), return only non-composite
+ dataset IDs.
+
+ Returns: list of all dataset IDs
"""
try:
@@ -339,6 +459,18 @@ def all_dataset_names(self, reader_name=None, composites=False):
on what files are provided even if a product/dataset is a "standard"
product for a particular reader.
+ Excludes composites unless ``composites=True`` is passed.
+
+ Args:
+ reader_name (str, optional): Name of reader for which to return
+ dataset IDs. If not passed, return dataset names for all
+ readers.
+ composites (bool, optional): If True, return dataset IDs including
+ composites. If False (default), return only non-composite
+ dataset names.
+
+ Returns: list of all dataset names
+
"""
return sorted(set(x['name'] for x in self.all_dataset_ids(
reader_name=reader_name, composites=composites)))
@@ -347,10 +479,10 @@ def _check_known_composites(self, available_only=False):
"""Create new dependency tree and check what composites we know about."""
# Note if we get compositors from the dep tree then it will include
# modified composites which we don't want
- sensor_comps, mods = self._composite_loader.load_compositors(self.attrs['sensor'])
+ sensor_comps, mods = load_compositor_configs_for_sensors(self.sensor_names)
# recreate the dependency tree so it doesn't interfere with the user's
# wishlist from self._dependency_tree
- dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=True)
+ dep_tree = DependencyTree(self._readers, sensor_comps, mods, available_only=available_only)
# ignore inline compositor dependencies starting with '_'
comps = (comp for comp_dict in sensor_comps.values()
for comp in comp_dict.keys() if not comp['name'].startswith('_'))
@@ -366,11 +498,11 @@ def _check_known_composites(self, available_only=False):
return sorted(available_comps & all_comps)
def available_composite_ids(self):
- """Get names of composites that can be generated from the available datasets."""
+ """Get IDs of composites that can be generated from the available datasets."""
return self._check_known_composites(available_only=True)
def available_composite_names(self):
- """All configured composites known to this Scene."""
+ """Names of all configured composites known to this Scene."""
return sorted(set(x['name'] for x in self.available_composite_ids()))
def all_composite_ids(self):
@@ -710,15 +842,7 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True,
"""
new_datasets = {}
datasets = list(new_scn._datasets.values())
- if isinstance(destination_area, str):
- destination_area = get_area_def(destination_area)
- if hasattr(destination_area, 'freeze'):
- try:
- finest_area = new_scn.finest_area()
- destination_area = destination_area.freeze(finest_area)
- except ValueError:
- raise ValueError("No dataset areas available to freeze "
- "DynamicAreaDefinition.")
+ destination_area = self._get_finalized_destination_area(destination_area, new_scn)
resamplers = {}
reductions = {}
@@ -740,34 +864,9 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True,
continue
LOG.debug("Resampling %s", ds_id)
source_area = dataset.attrs['area']
- try:
- if reduce_data:
- key = source_area
- try:
- (slice_x, slice_y), source_area = reductions[key]
- except KeyError:
- if resample_kwargs.get('resampler') == 'gradient_search':
- factor = resample_kwargs.get('shape_divisible_by', 2)
- else:
- factor = None
- try:
- slice_x, slice_y = source_area.get_area_slices(
- destination_area, shape_divisible_by=factor)
- except TypeError:
- slice_x, slice_y = source_area.get_area_slices(
- destination_area)
- source_area = source_area[slice_y, slice_x]
- reductions[key] = (slice_x, slice_y), source_area
- dataset = self._slice_data(source_area, (slice_x, slice_y), dataset)
- else:
- LOG.debug("Data reduction disabled by the user")
- except NotImplementedError:
- LOG.info("Not reducing data before resampling.")
- if source_area not in resamplers:
- key, resampler = prepare_resampler(
- source_area, destination_area, **resample_kwargs)
- resamplers[source_area] = resampler
- self._resamplers[key] = resampler
+ dataset, source_area = self._reduce_data(dataset, source_area, destination_area,
+ reduce_data, reductions, resample_kwargs)
+ self._prepare_resampler(source_area, destination_area, resamplers, resample_kwargs)
kwargs = resample_kwargs.copy()
kwargs['resampler'] = resamplers[source_area]
res = resample_dataset(dataset, destination_area, **kwargs)
@@ -777,6 +876,51 @@ def _resampled_scene(self, new_scn, destination_area, reduce_data=True,
if parent_dataset is not None:
replace_anc(res, pres)
+ def _get_finalized_destination_area(self, destination_area, new_scn):
+ if isinstance(destination_area, str):
+ destination_area = get_area_def(destination_area)
+ if hasattr(destination_area, 'freeze'):
+ try:
+ finest_area = new_scn.finest_area()
+ destination_area = destination_area.freeze(finest_area)
+ except ValueError:
+ raise ValueError("No dataset areas available to freeze "
+ "DynamicAreaDefinition.")
+ return destination_area
+
+ def _prepare_resampler(self, source_area, destination_area, resamplers, resample_kwargs):
+ if source_area not in resamplers:
+ key, resampler = prepare_resampler(
+ source_area, destination_area, **resample_kwargs)
+ resamplers[source_area] = resampler
+ self._resamplers[key] = resampler
+
+ def _reduce_data(self, dataset, source_area, destination_area, reduce_data, reductions, resample_kwargs):
+ try:
+ if reduce_data:
+ key = source_area
+ try:
+ (slice_x, slice_y), source_area = reductions[key]
+ except KeyError:
+ if resample_kwargs.get('resampler') == 'gradient_search':
+ factor = resample_kwargs.get('shape_divisible_by', 2)
+ else:
+ factor = None
+ try:
+ slice_x, slice_y = source_area.get_area_slices(
+ destination_area, shape_divisible_by=factor)
+ except TypeError:
+ slice_x, slice_y = source_area.get_area_slices(
+ destination_area)
+ source_area = source_area[slice_y, slice_x]
+ reductions[key] = (slice_x, slice_y), source_area
+ dataset = self._slice_data(source_area, (slice_x, slice_y), dataset)
+ else:
+ LOG.debug("Data reduction disabled by the user")
+ except NotImplementedError:
+ LOG.info("Not reducing data before resampling.")
+ return dataset, source_area
+
def resample(self, destination=None, datasets=None, generate=True,
unload=True, resampler=None, reduce_data=True,
**resample_kwargs):
@@ -814,7 +958,8 @@ def resample(self, destination=None, datasets=None, generate=True,
# regenerate anything from the wishlist that needs it (combining
# multiple resolutions, etc.)
- new_scn.generate_possible_composites(generate, unload)
+ if generate:
+ new_scn.generate_possible_composites(unload)
return new_scn
@@ -839,8 +984,8 @@ def show(self, dataset_id, overlay=None):
.. _pycoast: https://pycoast.readthedocs.io/
"""
- from satpy.writers import get_enhanced_image
from satpy.utils import in_ipynb
+ from satpy.writers import get_enhanced_image
img = get_enhanced_image(self[dataset_id].squeeze(), overlay=overlay)
if not in_ipynb():
img.show()
@@ -905,6 +1050,9 @@ def to_xarray_dataset(self, datasets=None):
"""
dataarrays = self._get_dataarrays_from_identifiers(datasets)
+ if len(dataarrays) == 0:
+ return xr.Dataset()
+
ds_dict = {i.attrs['name']: i.rename(i.attrs['name']) for i in dataarrays if i.attrs.get('area') is not None}
mdata = combine_metadata(*tuple(i.attrs for i in dataarrays))
if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition):
@@ -916,8 +1064,8 @@ def to_xarray_dataset(self, datasets=None):
if not isinstance(lons, DataArray):
lons = DataArray(lons, dims=('y', 'x'))
lats = DataArray(lats, dims=('y', 'x'))
- ds = xr.Dataset(ds_dict, coords={"latitude": (["y", "x"], lats),
- "longitude": (["y", "x"], lons)})
+ ds = xr.Dataset(ds_dict, coords={"latitude": lats,
+ "longitude": lons})
ds.attrs = mdata
return ds
@@ -989,7 +1137,13 @@ def save_dataset(self, dataset_id, filename=None, writer=None,
def save_datasets(self, writer=None, filename=None, datasets=None, compute=True,
**kwargs):
- """Save all the datasets present in a scene to disk using ``writer``.
+ """Save requested datasets present in a scene to disk using ``writer``.
+
+ Note that dependency datasets (those loaded solely to create another
+ and not requested explicitly) that may be contained in this Scene will
+ not be saved by default. The default datasets are those explicitly
+ requested through ``.load`` that currently exist in the Scene. Specify
+ dependency datasets using the ``datasets`` keyword argument.
Args:
writer (str): Name of writer to use when writing data to disk.
@@ -1000,7 +1154,9 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True,
dataset to. It may include string formatting
patterns that will be filled in by dataset
attributes.
- datasets (iterable): Limit written products to these datasets
+ datasets (iterable): Limit written products to these datasets.
+ Elements can be string name, a wavelength as a number, a
+ DataID, or DataQuery object.
compute (bool): If `True` (default), compute all of the saves to
disk. If `False` then the return value is either a
:doc:`dask:delayed` object or two lists to be passed to
@@ -1037,6 +1193,47 @@ def save_datasets(self, writer=None, filename=None, datasets=None, compute=True,
**kwargs)
return writer.save_datasets(dataarrays, compute=compute, **save_kwargs)
+ def compute(self, **kwargs):
+ """Call `compute` on all Scene data arrays.
+
+ See :meth:`xarray.DataArray.compute` for more details.
+ Note that this will convert the contents of the DataArray to numpy
+ arrays, which may not work with parts of Satpy that expect dask arrays.
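+
+ A minimal usage sketch::
+
+ >>> scn_numpy = scn.compute()  # new Scene with numpy-backed arrays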
+ """
+ from dask import compute
+ new_scn = self.copy()
+ datasets = compute(*(new_scn._datasets.values()), **kwargs)
+
+ for i, k in enumerate(new_scn._datasets.keys()):
+ new_scn[k] = datasets[i]
+
+ return new_scn
+
+ def persist(self, **kwargs):
+ """Call `persist` on all Scene data arrays.
+
+ See :meth:`xarray.DataArray.persist` for more details.
+ """
+ from dask import persist
+ new_scn = self.copy()
+ datasets = persist(*(new_scn._datasets.values()), **kwargs)
+
+ for i, k in enumerate(new_scn._datasets.keys()):
+ new_scn[k] = datasets[i]
+
+ return new_scn
+
+ def chunk(self, **kwargs):
+ """Call `chunk` on all Scene data arrays.
+
+ See :meth:`xarray.DataArray.chunk` for more details.
+ """
+ new_scn = self.copy()
+ for k in new_scn._datasets.keys():
+ new_scn[k] = new_scn[k].chunk(**kwargs)
+
+ return new_scn
+
@staticmethod
def _get_writer_by_ext(extension):
"""Find the writer matching the ``extension``.
@@ -1099,7 +1296,7 @@ def unload(self, keepables=None):
del self._datasets[ds_id]
def load(self, wishlist, calibration='*', resolution='*',
- polarization='*', level='*', generate=True, unload=True,
+ polarization='*', level='*', modifiers='*', generate=True, unload=True,
**kwargs):
"""Read and generate requested datasets.
@@ -1108,35 +1305,37 @@ def load(self, wishlist, calibration='*', resolution='*',
or they can not provide certain parameters and the "best" parameter
will be chosen. For example, if a dataset is available in multiple
resolutions and no resolution is specified in the wishlist's DataQuery
- then the highest (smallest number) resolution will be chosen.
+ then the highest (the smallest number) resolution will be chosen.
Loaded `DataArray` objects are created and stored in the Scene object.
Args:
wishlist (iterable): List of names (str), wavelengths (float),
- DataQuery objects or DataID of the requested
- datasets to load. See `available_dataset_ids()`
- for what datasets are available.
- calibration (list, str): Calibration levels to limit available
- datasets. This is a shortcut to
- having to list each DataQuery/DataID in
- `wishlist`.
+ DataQuery objects or DataID of the requested datasets to load.
+ See `available_dataset_ids()` for what datasets are available.
+ calibration (list | str): Calibration levels to limit available
+ datasets. This is a shortcut to having to list each
+ DataQuery/DataID in `wishlist`.
resolution (list | float): Resolution to limit available datasets.
- This is a shortcut similar to
- calibration.
+ This is a shortcut similar to calibration.
polarization (list | str): Polarization ('V', 'H') to limit
- available datasets. This is a shortcut
- similar to calibration.
+ available datasets. This is a shortcut similar to calibration.
+ modifiers (tuple | str): Modifiers that should be applied to the
+ loaded datasets. This is a shortcut similar to calibration,
+ but only represents a single set of modifiers as a tuple.
+ For example, specifying
+ ``modifiers=('sunz_corrected', 'rayleigh_corrected')`` will
+ attempt to apply both of these modifiers to all loaded
+ datasets in the specified order ('sunz_corrected' first).
level (list | str): Pressure level to limit available datasets.
- Pressure should be in hPa or mb. If an
- altitude is used it should be specified in
- inverse meters (1/m). The units of this
- parameter ultimately depend on the reader.
+ Pressure should be in hPa or mb. If an altitude is used it
+ should be specified in inverse meters (1/m). The units of this
+ parameter ultimately depend on the reader.
generate (bool): Generate composites from the loaded datasets
- (default: True)
- unload (bool): Unload datasets that were required to generate
- the requested datasets (composite dependencies)
- but are no longer needed.
+ (default: True)
+ unload (bool): Unload datasets that were required to generate the
+ requested datasets (composite dependencies) but are no longer
+ needed.
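+
+ Example (a minimal sketch; ``'overview'`` and ``10.8`` stand in for
+ whatever composite names or wavelengths the loaded readers provide)::
+
+     scn.load(['overview', 10.8], resolution=1000)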
"""
if isinstance(wishlist, str):
@@ -1146,16 +1345,20 @@ def load(self, wishlist, calibration='*', resolution='*',
query = DataQuery(calibration=calibration,
polarization=polarization,
resolution=resolution,
+ modifiers=modifiers,
level=level)
self._update_dependency_tree(needed_datasets, query)
self._wishlist |= needed_datasets
self._read_datasets_from_storage(**kwargs)
- self.generate_possible_composites(generate, unload)
+ if generate:
+ self.generate_possible_composites(unload)
def _update_dependency_tree(self, needed_datasets, query):
try:
+ comps, mods = load_compositor_configs_for_sensors(self.sensor_names)
+ self._dependency_tree.update_compositors_and_modifiers(comps, mods)
self._dependency_tree.populate_with_keys(needed_datasets, query)
except MissingDependencies as err:
raise KeyError(str(err))
@@ -1206,13 +1409,15 @@ def _load_datasets_by_readers(self, reader_datasets, **kwargs):
loaded_datasets.update(new_datasets)
return loaded_datasets
- def generate_possible_composites(self, generate, unload):
- """See what we can generate and do it."""
- if generate:
- keepables = self._generate_composites_from_loaded_datasets()
- else:
- # don't lose datasets we loaded to try to generate composites
- keepables = set(self._datasets.keys()) | self._wishlist
+ def generate_possible_composites(self, unload):
+ """See which composites can be generated and generate them.
+
+ Args:
+ unload (bool): Whether the dependencies of the composites
+ should be unloaded after successful generation.
+ """
+ keepables = self._generate_composites_from_loaded_datasets()
+
if self.missing_datasets:
self._remove_failed_datasets(keepables)
if unload:
@@ -1239,7 +1444,7 @@ def _generate_composites_nodes_from_loaded_datasets(self, compositor_nodes):
self._generate_composite(node, keepables)
return keepables
- def _generate_composite(self, comp_node: Node, keepables: set):
+ def _generate_composite(self, comp_node: CompositorNode, keepables: set):
"""Collect all composite prereqs and create the specified composite.
Args:
diff --git a/satpy/tests/compositor_tests/test_abi.py b/satpy/tests/compositor_tests/test_abi.py
index cf4568bbff..93df810cf5 100644
--- a/satpy/tests/compositor_tests/test_abi.py
+++ b/satpy/tests/compositor_tests/test_abi.py
@@ -25,17 +25,17 @@ class TestABIComposites(unittest.TestCase):
def test_load_composite_yaml(self):
"""Test loading the yaml for this sensor."""
- from satpy.composites.config_loader import CompositorLoader
- cl = CompositorLoader()
- cl.load_sensor_composites('abi')
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ load_compositor_configs_for_sensors(['abi'])
def test_simulated_green(self):
"""Test creating a fake 'green' band."""
- import xarray as xr
import dask.array as da
import numpy as np
- from satpy.composites.abi import SimulatedGreen
+ import xarray as xr
from pyresample.geometry import AreaDefinition
+
+ from satpy.composites.abi import SimulatedGreen
rows = 5
cols = 10
area = AreaDefinition(
diff --git a/satpy/tests/compositor_tests/test_agri.py b/satpy/tests/compositor_tests/test_agri.py
new file mode 100644
index 0000000000..32fcc72c61
--- /dev/null
+++ b/satpy/tests/compositor_tests/test_agri.py
@@ -0,0 +1,63 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2018-2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for AGRI compositors."""
+
+import unittest
+
+
+class TestAGRIComposites(unittest.TestCase):
+ """Test AGRI-specific composites."""
+
+ def test_load_composite_yaml(self):
+ """Test loading the yaml for this sensor."""
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ load_compositor_configs_for_sensors(['agri'])
+
+ def test_simulated_red(self):
+ """Test creating a fake 'red' band."""
+ import dask.array as da
+ import numpy as np
+ import xarray as xr
+ from pyresample.geometry import AreaDefinition
+
+ from satpy.composites.agri import SimulatedRed
+ rows = 5
+ cols = 10
+ area = AreaDefinition(
+ 'test', 'test', 'test',
+ {'proj': 'eqc', 'lon_0': 0.0,
+ 'lat_0': 0.0},
+ cols, rows,
+ (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+
+ comp = SimulatedRed('red', prerequisites=('C01', 'C02', 'C03'),
+ standard_name='toa_bidirectional_reflectance')
+ c01 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.25,
+ dims=('y', 'x'),
+ attrs={'name': 'C01', 'area': area})
+ c02 = xr.DataArray(da.zeros((rows, cols), chunks=25) + 0.30,
+ dims=('y', 'x'),
+ attrs={'name': 'C02', 'area': area})
+ res = comp((c01, c02))
+ self.assertIsInstance(res, xr.DataArray)
+ self.assertIsInstance(res.data, da.Array)
+ self.assertEqual(res.attrs['name'], 'red')
+ self.assertEqual(res.attrs['standard_name'],
+ 'toa_bidirectional_reflectance')
+ data = res.compute()
+ np.testing.assert_allclose(data, 0.24252874)
diff --git a/satpy/tests/compositor_tests/test_ahi.py b/satpy/tests/compositor_tests/test_ahi.py
index 2fe8390b35..a11794253c 100644
--- a/satpy/tests/compositor_tests/test_ahi.py
+++ b/satpy/tests/compositor_tests/test_ahi.py
@@ -25,17 +25,17 @@ class TestAHIComposites(unittest.TestCase):
def test_load_composite_yaml(self):
"""Test loading the yaml for this sensor."""
- from satpy.composites.config_loader import CompositorLoader
- cl = CompositorLoader()
- cl.load_sensor_composites('abi')
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ load_compositor_configs_for_sensors(['ahi'])
def test_corrected_green(self):
"""Test adjusting the 'green' band."""
- import xarray as xr
import dask.array as da
import numpy as np
- from satpy.composites.ahi import GreenCorrector
+ import xarray as xr
from pyresample.geometry import AreaDefinition
+
+ from satpy.composites.ahi import GreenCorrector
rows = 5
cols = 10
area = AreaDefinition(
diff --git a/satpy/tests/compositor_tests/test_glm.py b/satpy/tests/compositor_tests/test_glm.py
index f5eebb8038..6b79f96678 100644
--- a/satpy/tests/compositor_tests/test_glm.py
+++ b/satpy/tests/compositor_tests/test_glm.py
@@ -23,17 +23,17 @@ class TestGLMComposites:
def test_load_composite_yaml(self):
"""Test loading the yaml for this sensor."""
- from satpy.composites.config_loader import CompositorLoader
- cl = CompositorLoader()
- cl.load_sensor_composites('glm')
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ load_compositor_configs_for_sensors(['glm'])
def test_highlight_compositor(self):
"""Test creating a highlight composite."""
- import xarray as xr
import dask.array as da
import numpy as np
- from satpy.composites.glm import HighlightCompositor
+ import xarray as xr
from pyresample.geometry import AreaDefinition
+
+ from satpy.composites.glm import HighlightCompositor
rows = 5
cols = 10
area = AreaDefinition(
diff --git a/satpy/tests/compositor_tests/test_sar.py b/satpy/tests/compositor_tests/test_sar.py
index e2a9a5d530..ed71e22730 100644
--- a/satpy/tests/compositor_tests/test_sar.py
+++ b/satpy/tests/compositor_tests/test_sar.py
@@ -25,9 +25,10 @@ class TestSARComposites(unittest.TestCase):
def test_sar_ice(self):
"""Test creating a the sar_ice composite."""
- import xarray as xr
import dask.array as da
import numpy as np
+ import xarray as xr
+
from satpy.composites.sar import SARIce
rows = 2
@@ -54,9 +55,10 @@ def test_sar_ice(self):
def test_sar_ice_log(self):
"""Test creating a the sar_ice_log composite."""
- import xarray as xr
import dask.array as da
import numpy as np
+ import xarray as xr
+
from satpy.composites.sar import SARIceLog
rows = 2
diff --git a/satpy/tests/compositor_tests/test_viirs.py b/satpy/tests/compositor_tests/test_viirs.py
index a6604ce3b4..54000ccb70 100644
--- a/satpy/tests/compositor_tests/test_viirs.py
+++ b/satpy/tests/compositor_tests/test_viirs.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2018 Satpy developers
+# Copyright (c) 2018, 2022 Satpy developers
#
# This file is part of satpy.
#
@@ -17,25 +17,19 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for VIIRS compositors."""
-import unittest
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
-class TestVIIRSComposites(unittest.TestCase):
- """Test VIIRS-specific composites."""
+class TestVIIRSComposites:
+ """Test various VIIRS-specific composites."""
- def test_load_composite_yaml(self):
- """Test loading the yaml for this sensor."""
- from satpy.composites.config_loader import CompositorLoader
- cl = CompositorLoader()
- cl.load_sensor_composites('viirs')
-
- def test_histogram_dnb(self):
- """Test the 'histogram_dnb' compositor."""
- import xarray as xr
- import dask.array as da
- import numpy as np
- from satpy.composites.viirs import HistogramDNB
- from pyresample.geometry import AreaDefinition
+ @pytest.fixture
+ def area(self):
+ """Return fake area for use with DNB tests."""
rows = 5
cols = 10
area = AreaDefinition(
@@ -44,183 +38,166 @@ def test_histogram_dnb(self):
'lat_0': 0.0},
cols, rows,
(-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+ return area
- comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',),
- standard_name='toa_outgoing_radiance_per_'
- 'unit_wavelength')
- dnb = np.zeros((rows, cols)) + 0.25
+ @pytest.fixture
+ def c01(self, area):
+ """Return fake channel 1 data for DNB tests."""
+ dnb = np.zeros(area.shape) + 0.25
dnb[3, :] += 0.25
dnb[4:, :] += 0.5
dnb = da.from_array(dnb, chunks=25)
c01 = xr.DataArray(dnb,
dims=('y', 'x'),
attrs={'name': 'DNB', 'area': area})
+ return c01
+
+ @pytest.fixture
+ def c02(self, area):
+ """Return fake sza dataset for DNB tests."""
# data changes by row, sza changes by col for testing
- sza = np.zeros((rows, cols)) + 70.0
+ sza = np.zeros(area.shape) + 70.0
sza[:, 3] += 20.0
sza[:, 4:] += 45.0
sza = da.from_array(sza, chunks=25)
c02 = xr.DataArray(sza,
dims=('y', 'x'),
attrs={'name': 'solar_zenith_angle', 'area': area})
+ return c02
+
+ @pytest.fixture
+ def c03(self, area):
+ """Return fake lunal zenith angle dataset for DNB tests."""
+ lza = np.zeros(area.shape) + 70.0
+ lza[:, 3] += 20.0
+ lza[:, 4:] += 45.0
+ lza = da.from_array(lza, chunks=25)
+ c03 = xr.DataArray(lza,
+ dims=('y', 'x'),
+ attrs={'name': 'lunar_zenith_angle', 'area': area})
+ return c03
+
+ def test_load_composite_yaml(self):
+ """Test loading the yaml for this sensor."""
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ load_compositor_configs_for_sensors(['viirs'])
+
+ def test_histogram_dnb(self, c01, c02):
+ """Test the 'histogram_dnb' compositor."""
+ from satpy.composites.viirs import HistogramDNB
+
+ comp = HistogramDNB('histogram_dnb', prerequisites=('dnb',),
+ standard_name='toa_outgoing_radiance_per_'
+ 'unit_wavelength')
res = comp((c01, c02))
- self.assertIsInstance(res, xr.DataArray)
- self.assertIsInstance(res.data, da.Array)
- self.assertEqual(res.attrs['name'], 'histogram_dnb')
- self.assertEqual(res.attrs['standard_name'],
- 'equalized_radiance')
+ assert isinstance(res, xr.DataArray)
+ assert isinstance(res.data, da.Array)
+ assert res.attrs['name'] == 'histogram_dnb'
+ assert res.attrs['standard_name'] == 'equalized_radiance'
data = res.compute()
unique_values = np.unique(data)
np.testing.assert_allclose(unique_values, [0.5994, 0.7992, 0.999], rtol=1e-3)
- def test_adaptive_dnb(self):
+ def test_adaptive_dnb(self, c01, c02):
"""Test the 'adaptive_dnb' compositor."""
- import xarray as xr
- import dask.array as da
- import numpy as np
from satpy.composites.viirs import AdaptiveDNB
- from pyresample.geometry import AreaDefinition
- rows = 5
- cols = 10
- area = AreaDefinition(
- 'test', 'test', 'test',
- {'proj': 'eqc', 'lon_0': 0.0,
- 'lat_0': 0.0},
- cols, rows,
- (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
comp = AdaptiveDNB('adaptive_dnb', prerequisites=('dnb',),
standard_name='toa_outgoing_radiance_per_'
'unit_wavelength')
- dnb = np.zeros((rows, cols)) + 0.25
- dnb[3, :] += 0.25
- dnb[4:, :] += 0.5
- dnb = da.from_array(dnb, chunks=25)
- c01 = xr.DataArray(dnb,
- dims=('y', 'x'),
- attrs={'name': 'DNB', 'area': area})
- sza = np.zeros((rows, cols)) + 70.0
- sza[:, 3] += 20.0
- sza[:, 4:] += 45.0
- sza = da.from_array(sza, chunks=25)
- c02 = xr.DataArray(sza,
- dims=('y', 'x'),
- attrs={'name': 'solar_zenith_angle', 'area': area})
res = comp((c01, c02))
- self.assertIsInstance(res, xr.DataArray)
- self.assertIsInstance(res.data, da.Array)
- self.assertEqual(res.attrs['name'], 'adaptive_dnb')
- self.assertEqual(res.attrs['standard_name'],
- 'equalized_radiance')
+ assert isinstance(res, xr.DataArray)
+ assert isinstance(res.data, da.Array)
+ assert res.attrs['name'] == 'adaptive_dnb'
+ assert res.attrs['standard_name'] == 'equalized_radiance'
data = res.compute()
np.testing.assert_allclose(data.data, 0.999, rtol=1e-4)
- def test_erf_dnb(self):
- """Test the 'dynamic_dnb' or ERF DNB compositor."""
- import xarray as xr
- import dask.array as da
- import numpy as np
- from satpy.composites.viirs import ERFDNB
- from pyresample.geometry import AreaDefinition
- rows = 5
- cols = 10
- area = AreaDefinition(
- 'test', 'test', 'test',
- {'proj': 'eqc', 'lon_0': 0.0,
- 'lat_0': 0.0},
- cols, rows,
- (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+ def test_hncc_dnb(self, area, c01, c02, c03):
+ """Test the 'hncc_dnb' compositor."""
+ from satpy.composites.viirs import NCCZinke
- comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',),
- standard_name='toa_outgoing_radiance_per_'
- 'unit_wavelength')
- dnb = np.zeros((rows, cols)) + 0.25
- dnb[3, :] += 0.25
- dnb[4:, :] += 0.5
- dnb = da.from_array(dnb, chunks=25)
- c01 = xr.DataArray(dnb,
- dims=('y', 'x'),
- attrs={'name': 'DNB', 'area': area})
- sza = np.zeros((rows, cols)) + 70.0
- sza[:, 3] += 20.0
- sza[:, 4:] += 45.0
- sza = da.from_array(sza, chunks=25)
- c02 = xr.DataArray(sza,
- dims=('y', 'x'),
- attrs={'name': 'solar_zenith_angle', 'area': area})
- lza = np.zeros((rows, cols)) + 70.0
- lza[:, 3] += 20.0
- lza[:, 4:] += 45.0
- lza = da.from_array(lza, chunks=25)
- c03 = xr.DataArray(lza,
- dims=('y', 'x'),
- attrs={'name': 'lunar_zenith_angle', 'area': area})
+ comp = NCCZinke('hncc_dnb', prerequisites=('dnb',),
+ standard_name='toa_outgoing_radiance_per_'
+ 'unit_wavelength')
mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1,
dims=('y',),
attrs={'name': 'moon_illumination_fraction', 'area': area})
res = comp((c01, c02, c03, mif))
- self.assertIsInstance(res, xr.DataArray)
- self.assertIsInstance(res.data, da.Array)
- self.assertEqual(res.attrs['name'], 'dynamic_dnb')
- self.assertEqual(res.attrs['standard_name'],
- 'equalized_radiance')
+ assert isinstance(res, xr.DataArray)
+ assert isinstance(res.data, da.Array)
+ assert res.attrs['name'] == 'hncc_dnb'
+ assert res.attrs['standard_name'] == 'ncc_radiance'
data = res.compute()
unique = np.unique(data)
- np.testing.assert_allclose(unique, [0.00000000e+00, 1.00446703e-01, 1.64116082e-01, 2.09233451e-01,
- 1.43916324e+02, 2.03528498e+02, 2.49270516e+02])
+ np.testing.assert_allclose(
+ unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03,
+ 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03,
+ 4.50001560e+03])
- def test_hncc_dnb(self):
- """Test the 'hncc_dnb' compositor."""
- import xarray as xr
- import dask.array as da
- import numpy as np
- from satpy.composites.viirs import NCCZinke
- from pyresample.geometry import AreaDefinition
- rows = 5
- cols = 10
- area = AreaDefinition(
- 'test', 'test', 'test',
- {'proj': 'eqc', 'lon_0': 0.0,
- 'lat_0': 0.0},
- cols, rows,
- (-20037508.34, -10018754.17, 20037508.34, 10018754.17))
+ @pytest.mark.parametrize("dnb_units", ["W m-2 sr-1", "W cm-2 sr-1"])
+ @pytest.mark.parametrize("saturation_correction", [False, True])
+ def test_erf_dnb(self, dnb_units, saturation_correction, area, c02, c03):
+ """Test the 'dynamic_dnb' or ERF DNB compositor."""
+ from satpy.composites.viirs import ERFDNB
- comp = NCCZinke('hncc_dnb', prerequisites=('dnb',),
- standard_name='toa_outgoing_radiance_per_'
- 'unit_wavelength')
- dnb = np.zeros((rows, cols)) + 0.25
+ comp = ERFDNB('dynamic_dnb', prerequisites=('dnb',),
+ saturation_correction=saturation_correction,
+ standard_name='toa_outgoing_radiance_per_'
+ 'unit_wavelength')
+ # c01 here differs from the c01 fixture used in the other tests, so
+ # don't use the fixture
+ dnb = np.zeros(area.shape) + 0.25
+ cols = area.shape[1]
+ dnb[2, :cols // 2] = np.nan
dnb[3, :] += 0.25
dnb[4:, :] += 0.5
+ if dnb_units == "W cm-2 sr-1":
+ dnb /= 10000.0
dnb = da.from_array(dnb, chunks=25)
c01 = xr.DataArray(dnb,
dims=('y', 'x'),
- attrs={'name': 'DNB', 'area': area})
- sza = np.zeros((rows, cols)) + 70.0
- sza[:, 3] += 20.0
- sza[:, 4:] += 45.0
- sza = da.from_array(sza, chunks=25)
- c02 = xr.DataArray(sza,
- dims=('y', 'x'),
- attrs={'name': 'solar_zenith_angle', 'area': area})
- lza = np.zeros((rows, cols)) + 70.0
- lza[:, 3] += 20.0
- lza[:, 4:] += 45.0
- lza = da.from_array(lza, chunks=25)
- c03 = xr.DataArray(lza,
- dims=('y', 'x'),
- attrs={'name': 'lunar_zenith_angle', 'area': area})
+ attrs={'name': 'DNB', 'area': area, 'units': dnb_units})
mif = xr.DataArray(da.zeros((5,), chunks=5) + 0.1,
dims=('y',),
attrs={'name': 'moon_illumination_fraction', 'area': area})
res = comp((c01, c02, c03, mif))
- self.assertIsInstance(res, xr.DataArray)
- self.assertIsInstance(res.data, da.Array)
- self.assertEqual(res.attrs['name'], 'hncc_dnb')
- self.assertEqual(res.attrs['standard_name'],
- 'ncc_radiance')
+ assert isinstance(res, xr.DataArray)
+ assert isinstance(res.data, da.Array)
+ assert res.attrs['name'] == 'dynamic_dnb'
+ assert res.attrs['standard_name'] == 'equalized_radiance'
data = res.compute()
unique = np.unique(data)
- np.testing.assert_allclose(
- unique, [3.48479712e-04, 6.96955799e-04, 1.04543189e-03, 4.75394738e-03,
- 9.50784532e-03, 1.42617433e-02, 1.50001560e+03, 3.00001560e+03,
- 4.50001560e+03])
+ assert np.isnan(unique).any()
+ nonnan_unique = unique[~np.isnan(unique)]
+ if saturation_correction:
+ exp_unique = [0.000000e+00, 3.978305e-04, 6.500003e-04,
+ 8.286927e-04, 5.628335e-01, 7.959671e-01,
+ 9.748567e-01]
+ else:
+ exp_unique = [0.00000000e+00, 1.00446703e-01, 1.64116082e-01,
+ 2.09233451e-01, 1.43916324e+02, 2.03528498e+02,
+ 2.49270516e+02]
+ np.testing.assert_allclose(nonnan_unique, exp_unique)
+
+ def test_snow_age(self, area):
+ """Test the 'snow_age' compositor."""
+ from satpy.composites.viirs import SnowAge
+
+ projectables = tuple(
+ xr.DataArray(
+ da.from_array(np.full(area.shape, 5.*i), chunks=5),
+ dims=("y", "x"),
+ attrs={"name": f"M0{i:d}",
+ "calibration": "reflectance",
+ "units": "%"})
+ for i in range(7, 12))
+ comp = SnowAge(
+ "snow_age",
+ prerequisites=("M07", "M08", "M09", "M10", "M11",),
+ standard_name="snow_age")
+ res = comp(projectables)
+ assert isinstance(res, xr.DataArray)
+ assert isinstance(res.data, da.Array)
+ assert res.attrs["name"] == "snow_age"
+ assert "units" not in res.attrs
diff --git a/satpy/tests/conftest.py b/satpy/tests/conftest.py
new file mode 100644
index 0000000000..8e3aee65ae
--- /dev/null
+++ b/satpy/tests/conftest.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Shared preparation and utilities for testing.
+
+This module is executed automatically by pytest.
+
+"""
+
+import pytest
+
+import satpy
+
+
+@pytest.fixture(autouse=True)
+def reset_satpy_config(tmpdir):
+ """Set satpy config to logical defaults for tests."""
+ test_config = {
+ "cache_dir": str(tmpdir / "cache"),
+ "data_dir": str(tmpdir / "data"),
+ "config_path": [],
+ "cache_lonlats": False,
+ "cache_sensor_angles": False,
+ }
+ with satpy.config.set(test_config):
+ yield
+
+
+@pytest.fixture(autouse=True)
+def clear_function_caches():
+ """Clear out global function-level caches that may cause conflicts between tests."""
+ from satpy.composites.config_loader import load_compositor_configs_for_sensor
+ load_compositor_configs_for_sensor.cache_clear()
diff --git a/satpy/tests/enhancement_tests/test_abi.py b/satpy/tests/enhancement_tests/test_abi.py
index fbe267f5af..f7ebb853b4 100644
--- a/satpy/tests/enhancement_tests/test_abi.py
+++ b/satpy/tests/enhancement_tests/test_abi.py
@@ -18,9 +18,10 @@
"""Unit testing for the ABI enhancement functions."""
import unittest
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
class TestABIEnhancement(unittest.TestCase):
@@ -33,9 +34,10 @@ def setUp(self):
def test_cimss_true_color_contrast(self):
"""Test the cimss_true_color_contrast enhancement."""
- from satpy.enhancements.abi import cimss_true_color_contrast
from trollimage.xrimage import XRImage
+ from satpy.enhancements.abi import cimss_true_color_contrast
+
expected = np.array([[
[0., 0., 0.05261956, 0.13396146],
[0.21530335, 0.29664525, 0.37798715, 0.45932905],
diff --git a/satpy/tests/enhancement_tests/test_ahi.py b/satpy/tests/enhancement_tests/test_ahi.py
new file mode 100644
index 0000000000..118a00efe6
--- /dev/null
+++ b/satpy/tests/enhancement_tests/test_ahi.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Unit testing for the AHI enhancement function."""
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+
+
+class TestAHIEnhancement:
+ """Test the AHI enhancement functions."""
+
+ def setup(self):
+ """Create test data."""
+ data = da.arange(-100, 1000, 110).reshape(2, 5)
+ rgb_data = np.stack([data, data, data])
+ self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'),
+ coords={'bands': ['R', 'G', 'B']})
+
+ def test_jma_true_color_reproduction(self):
+ """Test the jma_true_color_reproduction enhancement."""
+ from trollimage.xrimage import XRImage
+
+ from satpy.enhancements.ahi import jma_true_color_reproduction
+
+ expected = [[[-109.98, 10.998, 131.976, 252.954, 373.932],
+ [494.91, 615.888, 736.866, 857.844, 978.822]],
+
+ [[-97.6, 9.76, 117.12, 224.48, 331.84],
+ [439.2, 546.56, 653.92, 761.28, 868.64]],
+
+ [[-94.27, 9.427, 113.124, 216.821, 320.518],
+ [424.215, 527.912, 631.609, 735.306, 839.003]]]
+
+ img = XRImage(self.rgb)
+ jma_true_color_reproduction(img)
+ np.testing.assert_almost_equal(img.data.compute(), expected)
diff --git a/satpy/tests/enhancement_tests/test_atmosphere.py b/satpy/tests/enhancement_tests/test_atmosphere.py
new file mode 100644
index 0000000000..42e25af0c6
--- /dev/null
+++ b/satpy/tests/enhancement_tests/test_atmosphere.py
@@ -0,0 +1,61 @@
+# Copyright (c) 2022- Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for enhancements in enhancements/atmosphere.py."""
+
+import datetime
+
+import dask.array as da
+import numpy as np
+import xarray as xr
+from trollimage.xrimage import XRImage
+
+
+def test_essl_moisture():
+ """Test ESSL moisture compositor."""
+ from satpy.enhancements.atmosphere import essl_moisture
+
+ ratio = xr.DataArray(
+ da.linspace(1.0, 1.7, 25, chunks=5).reshape((5, 5)),
+ dims=("y", "x"),
+ attrs={"name": "ratio",
+ "calibration": "reflectance",
+ "units": "%",
+ "mode": "L"})
+ im = XRImage(ratio)
+
+ essl_moisture(im)
+ assert im.data.attrs["mode"] == "RGB"
+ np.testing.assert_array_equal(im.data["bands"], ["R", "G", "B"])
+ assert im.data.sel(bands="R")[0, 0] == 1
+ np.testing.assert_allclose(im.data.sel(bands="R")[2, 2], 0.04, rtol=1e-4)
+ np.testing.assert_allclose(im.data.sel(bands="G")[2, 2], 0.42857, rtol=1e-4)
+ np.testing.assert_allclose(im.data.sel(bands="B")[2, 2], 0.1875, rtol=1e-4)
+
+ # test FCI test data correction
+ ratio = xr.DataArray(
+ da.linspace(1.0, 1.7, 25, chunks=5).reshape((5, 5)),
+ dims=("y", "x"),
+ attrs={"name": "ratio",
+ "calibration": "reflectance",
+ "units": "%",
+ "mode": "L",
+ "sensor": "fci",
+ "start_time": datetime.datetime(1999, 1, 1)})
+ im = XRImage(ratio)
+ essl_moisture(im)
+ np.testing.assert_allclose(im.data.sel(bands="R")[3, 3], 0.7342, rtol=1e-4)
+ np.testing.assert_allclose(im.data.sel(bands="G")[3, 3], 0.7257, rtol=1e-4)
+ np.testing.assert_allclose(im.data.sel(bands="B")[3, 3], 0.39, rtol=1e-4)
diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py
index 6371cb88ce..20a4453ef7 100644
--- a/satpy/tests/enhancement_tests/test_enhancements.py
+++ b/satpy/tests/enhancement_tests/test_enhancements.py
@@ -17,18 +17,50 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit testing the enhancements functions, e.g. cira_stretch."""
+import contextlib
import os
-import unittest
+from tempfile import NamedTemporaryFile
+from unittest import mock
+
+import dask.array as da
import numpy as np
+import pytest
import xarray as xr
-import dask.array as da
-from unittest import mock
+
+from satpy.enhancements import create_colormap, on_dask_array, on_separate_bands, using_map_blocks
+
+
+def run_and_check_enhancement(func, data, expected, **kwargs):
+ """Perform basic checks that apply to multiple tests."""
+ from trollimage.xrimage import XRImage
+
+ pre_attrs = data.attrs
+ img = XRImage(data)
+ func(img, **kwargs)
+
+ assert isinstance(img.data.data, da.Array)
+ old_keys = set(pre_attrs.keys())
+ # It is OK to have "enhancement_history" added
+ new_keys = set(img.data.attrs.keys()) - {"enhancement_history"}
+ assert old_keys == new_keys
+
+ res_data_arr = img.data
+ assert isinstance(res_data_arr, xr.DataArray)
+ assert isinstance(res_data_arr.data, da.Array)
+ res_data = res_data_arr.data.compute() # mimics what xrimage geotiff writing does
+ assert not isinstance(res_data, da.Array)
+ np.testing.assert_allclose(res_data, expected, atol=1.e-6, rtol=0)
+
+
+def identical_decorator(func):
+ """Decorate but do nothing."""
+ return func
-class TestEnhancementStretch(unittest.TestCase):
+class TestEnhancementStretch:
"""Class for testing enhancements in satpy.enhancements."""
- def setUp(self):
+ def setup_method(self):
"""Create test data used by every test."""
data = np.arange(-210, 790, 100).reshape((2, 5)) * 0.95
data[0, 0] = np.nan # one bad value for testing
@@ -36,26 +68,36 @@ def setUp(self):
crefl_data /= 5.605
crefl_data[0, 0] = np.nan # one bad value for testing
crefl_data[0, 1] = 0.
- self.ch1 = xr.DataArray(data, dims=('y', 'x'), attrs={'test': 'test'})
- self.ch2 = xr.DataArray(crefl_data, dims=('y', 'x'), attrs={'test': 'test'})
+ self.ch1 = xr.DataArray(da.from_array(data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'})
+ self.ch2 = xr.DataArray(da.from_array(crefl_data, chunks=2), dims=('y', 'x'), attrs={'test': 'test'})
rgb_data = np.stack([data, data, data])
- self.rgb = xr.DataArray(rgb_data, dims=('bands', 'y', 'x'),
+ self.rgb = xr.DataArray(da.from_array(rgb_data, chunks=(3, 2, 2)),
+ dims=('bands', 'y', 'x'),
coords={'bands': ['R', 'G', 'B']})
- def _test_enhancement(self, func, data, expected, **kwargs):
- """Perform basic checks that apply to multiple tests."""
- from trollimage.xrimage import XRImage
-
- pre_attrs = data.attrs
- img = XRImage(data)
- func(img, **kwargs)
-
- self.assertIsInstance(img.data.data, da.Array)
- self.assertListEqual(sorted(pre_attrs.keys()),
- sorted(img.data.attrs.keys()),
- "DataArray attributes were not preserved")
-
- np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0)
+ @pytest.mark.parametrize(
+ ("decorator", "exp_call_cls"),
+ [
+ (identical_decorator, xr.DataArray),
+ (on_dask_array, da.Array),
+ (using_map_blocks, np.ndarray),
+ ],
+ )
+ @pytest.mark.parametrize("input_data_name", ["ch1", "ch2", "rgb"])
+ def test_apply_enhancement(self, input_data_name, decorator, exp_call_cls):
+ """Test the 'apply_enhancement' utility function."""
+ def _enh_func(img):
+ def _calc_func(data):
+ assert isinstance(data, exp_call_cls)
+ return data
+ decorated_func = decorator(_calc_func)
+ return decorated_func(img.data)
+
+ in_data = getattr(self, input_data_name)
+ exp_data = in_data.values
+ if "bands" not in in_data.coords:
+ exp_data = exp_data[np.newaxis, :, :]
+ run_and_check_enhancement(_enh_func, in_data, exp_data)
def test_cira_stretch(self):
"""Test applying the cira_stretch."""
@@ -64,7 +106,7 @@ def test_cira_stretch(self):
expected = np.array([[
[np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296],
[1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]])
- self._test_enhancement(cira_stretch, self.ch1, expected)
+ run_and_check_enhancement(cira_stretch, self.ch1, expected)
def test_reinhard(self):
"""Test the reinhard algorithm."""
@@ -77,7 +119,7 @@ def test_reinhard(self):
[[np.nan, 0., 0., 0.93333793, 1.29432402],
[1.55428709, 1.76572249, 1.94738635, 2.10848544, 2.25432809]]])
- self._test_enhancement(reinhard_to_srgb, self.rgb, expected)
+ run_and_check_enhancement(reinhard_to_srgb, self.rgb, expected)
def test_lookup(self):
"""Test the lookup enhancement function."""
@@ -86,7 +128,7 @@ def test_lookup(self):
[0., 0., 0., 0.333333, 0.705882],
[1., 1., 1., 1., 1.]]])
lut = np.arange(256.)
- self._test_enhancement(lookup, self.ch1, expected, luts=lut)
+ run_and_check_enhancement(lookup, self.ch1, expected, luts=lut)
expected = np.array([[[0., 0., 0., 0.333333, 0.705882],
[1., 1., 1., 1., 1.]],
@@ -96,12 +138,13 @@ def test_lookup(self):
[1., 1., 1., 1., 1.]]])
lut = np.arange(256.)
lut = np.vstack((lut, lut, lut)).T
- self._test_enhancement(lookup, self.rgb, expected, luts=lut)
+ run_and_check_enhancement(lookup, self.rgb, expected, luts=lut)
def test_colorize(self):
"""Test the colorize enhancement function."""
- from satpy.enhancements import colorize
from trollimage.colormap import brbg
+
+ from satpy.enhancements import colorize
expected = np.array([[
[np.nan, 3.29409498e-01, 3.29409498e-01,
4.35952940e-06, 4.35952940e-06],
@@ -115,14 +158,15 @@ def test_colorize(self):
1.88238767e-01, 1.88238767e-01],
[1.88238767e-01, 1.88238767e-01, 1.88238767e-01,
1.88238767e-01, 1.88238767e-01]]])
- self._test_enhancement(colorize, self.ch1, expected, palettes=brbg)
+ run_and_check_enhancement(colorize, self.ch1, expected, palettes=brbg)
def test_palettize(self):
"""Test the palettize enhancement function."""
- from satpy.enhancements import palettize
from trollimage.colormap import brbg
+
+ from satpy.enhancements import palettize
expected = np.array([[[10, 0, 0, 10, 10], [10, 10, 10, 10, 10]]])
- self._test_enhancement(palettize, self.ch1, expected, palettes=brbg)
+ run_and_check_enhancement(palettize, self.ch1, expected, palettes=brbg)
def test_three_d_effect(self):
"""Test the three_d_effect enhancement function."""
@@ -130,7 +174,7 @@ def test_three_d_effect(self):
expected = np.array([[
[np.nan, np.nan, -389.5, -294.5, 826.5],
[np.nan, np.nan, 85.5, 180.5, 1301.5]]])
- self._test_enhancement(three_d_effect, self.ch1, expected)
+ run_and_check_enhancement(three_d_effect, self.ch1, expected)
def test_crefl_scaling(self):
"""Test the crefl_scaling enhancement function."""
@@ -138,8 +182,8 @@ def test_crefl_scaling(self):
expected = np.array([[
[np.nan, 0., 0., 0.44378, 0.631734],
[0.737562, 0.825041, 0.912521, 1., 1.]]])
- self._test_enhancement(crefl_scaling, self.ch2, expected, idx=[0., 25., 55., 100., 255.],
- sc=[0., 90., 140., 175., 255.])
+ run_and_check_enhancement(crefl_scaling, self.ch2, expected, idx=[0., 25., 55., 100., 255.],
+ sc=[0., 90., 140., 175., 255.])
def test_piecewise_linear_stretch(self):
"""Test the piecewise_linear_stretch enhancement function."""
@@ -147,13 +191,13 @@ def test_piecewise_linear_stretch(self):
expected = np.array([[
[np.nan, 0., 0., 0.44378, 0.631734],
[0.737562, 0.825041, 0.912521, 1., 1.]]])
- self._test_enhancement(piecewise_linear_stretch,
- self.ch2 / 100.0,
- expected,
- xp=[0., 25., 55., 100., 255.],
- fp=[0., 90., 140., 175., 255.],
- reference_scale_factor=255,
- )
+ run_and_check_enhancement(piecewise_linear_stretch,
+ self.ch2 / 100.0,
+ expected,
+ xp=[0., 25., 55., 100., 255.],
+ fp=[0., 90., 140., 175., 255.],
+ reference_scale_factor=255,
+ )
def test_btemp_threshold(self):
"""Test applying the cira_stretch."""
@@ -162,13 +206,15 @@ def test_btemp_threshold(self):
expected = np.array([[
[np.nan, 0.946207, 0.892695, 0.839184, 0.785672],
[0.73216, 0.595869, 0.158745, -0.278379, -0.715503]]])
- self._test_enhancement(btemp_threshold, self.ch1, expected,
- min_in=-200, max_in=500, threshold=350)
+ run_and_check_enhancement(btemp_threshold, self.ch1, expected,
+ min_in=-200, max_in=500, threshold=350)
def test_merge_colormaps(self):
"""Test merging colormaps."""
from trollimage.colormap import Colormap
- from satpy.enhancements import _merge_colormaps as mcp, create_colormap
+
+ from satpy.enhancements import _merge_colormaps as mcp
+ from satpy.enhancements import create_colormap
ret_map = mock.MagicMock()
create_colormap_mock = mock.Mock(wraps=create_colormap)
@@ -177,7 +223,7 @@ def test_merge_colormaps(self):
with mock.patch('satpy.enhancements.create_colormap', create_colormap_mock):
res = mcp(kwargs)
- self.assertTrue(res is cmap1)
+ assert res is cmap1
create_colormap_mock.assert_not_called()
create_colormap_mock.reset_mock()
ret_map.reset_mock()
@@ -211,164 +257,134 @@ def test_merge_colormaps(self):
def tearDown(self):
"""Clean up."""
- pass
-class TestColormapLoading(unittest.TestCase):
+@contextlib.contextmanager
+def closed_named_temp_file(**kwargs):
+ """Named temporary file context manager that closes the file after creation.
+
+ This helps on Windows, where opening or deleting a file that is
+ already open can raise errors.
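+
+ Example (a minimal sketch; mirrors how the tests below use it)::
+
+     with closed_named_temp_file(suffix=".npy") as fname:
+         np.save(fname, np.zeros((4, 3)))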
+
+ """
+ try:
+ with NamedTemporaryFile(delete=False, **kwargs) as tmp_cmap:
+ yield tmp_cmap.name
+ finally:
+ os.remove(tmp_cmap.name)
+
+
+def _write_cmap_to_file(cmap_filename, cmap_data):
+ ext = os.path.splitext(cmap_filename)[1]
+ if ext in (".npy",):
+ np.save(cmap_filename, cmap_data)
+ elif ext in (".npz",):
+ np.savez(cmap_filename, cmap_data)
+ else:
+ np.savetxt(cmap_filename, cmap_data, delimiter=",")
+
+
+def _generate_cmap_test_data(color_scale, colormap_mode):
+ cmap_data = np.array([
+ [1, 0, 0],
+ [1, 1, 0],
+ [1, 1, 1],
+ [0, 0, 1],
+ ], dtype=np.float64)
+ if len(colormap_mode) != 3:
+ _cmap_data = cmap_data
+ cmap_data = np.empty((cmap_data.shape[0], len(colormap_mode)),
+ dtype=np.float64)
+ if colormap_mode.startswith("V") or colormap_mode.endswith("A"):
+ cmap_data[:, 0] = np.array([128, 130, 132, 134]) / 255.0
+ cmap_data[:, -3:] = _cmap_data
+ if colormap_mode.startswith("V") and colormap_mode.endswith("A"):
+ cmap_data[:, 1] = np.array([128, 130, 132, 134]) / 255.0
+ if color_scale is None or color_scale == 255:
+ cmap_data = (cmap_data * 255).astype(np.uint8)
+ return cmap_data
+
+
+class TestColormapLoading:
"""Test utilities used with colormaps."""
- def test_cmap_from_file_rgb(self):
+ @pytest.mark.parametrize("color_scale", [None, 1.0])
+ @pytest.mark.parametrize("colormap_mode", ["RGB", "VRGB", "VRGBA"])
+ @pytest.mark.parametrize("extra_kwargs",
+ [
+ {},
+ {"min_value": 50, "max_value": 100},
+ ])
+ @pytest.mark.parametrize("filename_suffix", [".npy", ".npz", ".csv"])
+ def test_cmap_from_file(self, color_scale, colormap_mode, extra_kwargs, filename_suffix):
"""Test that colormaps can be loaded from a binary file."""
- from satpy.enhancements import create_colormap
- from tempfile import NamedTemporaryFile
# create the colormap file on disk
- with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
- cmap_filename = tmp_cmap.name
- np.save(cmap_filename, np.array([
- [255, 0, 0],
- [255, 255, 0],
- [255, 255, 255],
- [0, 0, 255],
- ]))
-
- try:
- cmap = create_colormap({'filename': cmap_filename})
- self.assertEqual(cmap.colors.shape[0], 4)
- np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 0)
- self.assertEqual(cmap.values[-1], 1.0)
-
- cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100})
- self.assertEqual(cmap.colors.shape[0], 4)
- np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 50)
- self.assertEqual(cmap.values[-1], 100)
- finally:
- os.remove(cmap_filename)
-
- def test_cmap_from_file_rgb_1(self):
- """Test that colormaps can be loaded from a binary file with 0-1 colors."""
- from satpy.enhancements import create_colormap
- from tempfile import NamedTemporaryFile
- # create the colormap file on disk
- with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
- cmap_filename = tmp_cmap.name
- np.save(cmap_filename, np.array([
- [1, 0, 0],
- [1, 1, 0],
- [1, 1, 1],
- [0, 0, 1],
- ]))
-
- try:
- cmap = create_colormap({'filename': cmap_filename,
- 'color_scale': 1})
- self.assertEqual(cmap.colors.shape[0], 4)
- np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 0)
- self.assertEqual(cmap.values[-1], 1.0)
-
- cmap = create_colormap({'filename': cmap_filename, 'color_scale': 1,
- 'min_value': 50, 'max_value': 100})
- self.assertEqual(cmap.colors.shape[0], 4)
- np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 50)
- self.assertEqual(cmap.values[-1], 100)
- finally:
- os.remove(cmap_filename)
-
- def test_cmap_from_file_vrgb(self):
- """Test that colormaps can be loaded from a binary file with values."""
- from satpy.enhancements import create_colormap
- from tempfile import NamedTemporaryFile
- # create the colormap file on disk
- with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
- cmap_filename = tmp_cmap.name
- np.save(cmap_filename, np.array([
- [128, 255, 0, 0],
- [130, 255, 255, 0],
- [132, 255, 255, 255],
- [134, 0, 0, 255],
- ]))
-
- try:
- # default mode of VRGB
- cmap = create_colormap({'filename': cmap_filename})
- self.assertEqual(cmap.colors.shape[0], 4)
- np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 128)
- self.assertEqual(cmap.values[-1], 134)
-
- cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': 'RGBA'})
- self.assertEqual(cmap.colors.shape[0], 4)
- self.assertEqual(cmap.colors.shape[1], 4) # RGBA
+ with closed_named_temp_file(suffix=filename_suffix) as cmap_filename:
+ cmap_data = _generate_cmap_test_data(color_scale, colormap_mode)
+ _write_cmap_to_file(cmap_filename, cmap_data)
+
+ unset_first_value = 128.0 / 255.0 if colormap_mode.startswith("V") else 0.0
+ unset_last_value = 134.0 / 255.0 if colormap_mode.startswith("V") else 1.0
+ if (color_scale is None or color_scale == 255) and colormap_mode.startswith("V"):
+ unset_first_value *= 255
+ unset_last_value *= 255
+ if "min_value" in extra_kwargs:
+ unset_first_value = extra_kwargs["min_value"]
+ unset_last_value = extra_kwargs["max_value"]
+
+ first_color = [1.0, 0.0, 0.0]
+ if colormap_mode == "VRGBA":
+ first_color = [128.0 / 255.0] + first_color
+
+ kwargs1 = {"filename": cmap_filename}
+ kwargs1.update(extra_kwargs)
+ if color_scale is not None:
+ kwargs1["color_scale"] = color_scale
+
+ cmap = create_colormap(kwargs1)
+ assert cmap.colors.shape[0] == 4
+ np.testing.assert_equal(cmap.colors[0], first_color)
+ assert cmap.values.shape[0] == 4
+ assert cmap.values[0] == unset_first_value
+ assert cmap.values[-1] == unset_last_value
+
+ def test_cmap_vrgb_as_rgba(self):
+ """Test that data created as VRGB still reads as RGBA."""
+ with closed_named_temp_file(suffix=".npy") as cmap_filename:
+ cmap_data = _generate_cmap_test_data(None, "VRGB")
+ np.save(cmap_filename, cmap_data)
+ cmap = create_colormap({'filename': cmap_filename, 'colormap_mode': "RGBA"})
+ assert cmap.colors.shape[0] == 4
+ assert cmap.colors.shape[1] == 4 # RGBA
np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 0)
- self.assertEqual(cmap.values[-1], 1.0)
-
- cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100})
- self.assertEqual(cmap.colors.shape[0], 4)
- np.testing.assert_equal(cmap.colors[0], [1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 50)
- self.assertEqual(cmap.values[-1], 100)
-
- self.assertRaises(ValueError, create_colormap,
- {'filename': cmap_filename, 'colormap_mode': 'RGB',
- 'min_value': 50, 'max_value': 100})
- finally:
- os.remove(cmap_filename)
-
- def test_cmap_from_file_vrgba(self):
- """Test that colormaps can be loaded RGBA colors and values."""
- from satpy.enhancements import create_colormap
- from tempfile import NamedTemporaryFile
- # create the colormap file on disk
- with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
- cmap_filename = tmp_cmap.name
- np.save(cmap_filename, np.array([
- [128, 128, 255, 0, 0], # value, R, G, B, A
- [130, 130, 255, 255, 0],
- [132, 132, 255, 255, 255],
- [134, 134, 0, 0, 255],
- ]))
-
- try:
- # default mode of VRGBA
- cmap = create_colormap({'filename': cmap_filename})
- self.assertEqual(cmap.colors.shape[0], 4)
- self.assertEqual(cmap.colors.shape[1], 4) # RGBA
- np.testing.assert_equal(cmap.colors[0], [128 / 255.0, 1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 128)
- self.assertEqual(cmap.values[-1], 134)
-
- self.assertRaises(ValueError, create_colormap,
- {'filename': cmap_filename, 'colormap_mode': 'RGBA'})
-
- cmap = create_colormap({'filename': cmap_filename, 'min_value': 50, 'max_value': 100})
- self.assertEqual(cmap.colors.shape[0], 4)
- self.assertEqual(cmap.colors.shape[1], 4) # RGBA
- np.testing.assert_equal(cmap.colors[0], [128 / 255.0, 1.0, 0, 0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 50)
- self.assertEqual(cmap.values[-1], 100)
- finally:
- os.remove(cmap_filename)
+ assert cmap.values.shape[0] == 4
+ assert cmap.values[0] == 0
+ assert cmap.values[-1] == 1.0
+
+ @pytest.mark.parametrize(
+ ("real_mode", "forced_mode"),
+ [
+ ("VRGBA", "RGBA"),
+ ("VRGBA", "VRGB"),
+ ("RGBA", "RGB"),
+ ]
+ )
+ @pytest.mark.parametrize("filename_suffix", [".npy", ".csv"])
+ def test_cmap_bad_mode(self, real_mode, forced_mode, filename_suffix):
+ """Test that reading colormaps with the wrong mode fails."""
+ with closed_named_temp_file(suffix=filename_suffix) as cmap_filename:
+ cmap_data = _generate_cmap_test_data(None, real_mode)
+ _write_cmap_to_file(cmap_filename, cmap_data)
+ # Forcing the colormap to the wrong mode should raise an exception
+ with pytest.raises(ValueError):
+ create_colormap({'filename': cmap_filename, 'colormap_mode': forced_mode})
def test_cmap_from_file_bad_shape(self):
"""Test that unknown array shape causes an error."""
from satpy.enhancements import create_colormap
- from tempfile import NamedTemporaryFile
+
# create the colormap file on disk
- with NamedTemporaryFile(suffix='.npy', delete=False) as tmp_cmap:
- cmap_filename = tmp_cmap.name
+ with closed_named_temp_file(suffix='.npy') as cmap_filename:
np.save(cmap_filename, np.array([
[0],
[64],
@@ -376,11 +392,28 @@ def test_cmap_from_file_bad_shape(self):
[255],
]))
- try:
- self.assertRaises(ValueError, create_colormap,
- {'filename': cmap_filename})
- finally:
- os.remove(cmap_filename)
+ with pytest.raises(ValueError):
+ create_colormap({'filename': cmap_filename})
+
+ def test_cmap_from_config_path(self, tmp_path):
+ """Test loading a colormap relative to a config path."""
+ import satpy
+ from satpy.enhancements import create_colormap
+
+ cmap_dir = tmp_path / "colormaps"
+ cmap_dir.mkdir()
+ cmap_filename = cmap_dir / "my_colormap.npy"
+ cmap_data = _generate_cmap_test_data(None, "RGBA")
+ np.save(cmap_filename, cmap_data)
+ with satpy.config.set(config_path=[tmp_path]):
+ rel_cmap_filename = os.path.join("colormaps", "my_colormap.npy")
+ cmap = create_colormap({'filename': rel_cmap_filename, 'colormap_mode': "RGBA"})
+ assert cmap.colors.shape[0] == 4
+ assert cmap.colors.shape[1] == 4 # RGBA
+ np.testing.assert_equal(cmap.colors[0], [128 / 255., 1.0, 0, 0])
+ assert cmap.values.shape[0] == 4
+ assert cmap.values[0] == 0
+ assert cmap.values[-1] == 1.0
def test_cmap_from_trollimage(self):
"""Test that colormaps in trollimage can be loaded."""
@@ -393,7 +426,8 @@ def test_cmap_from_trollimage(self):
def test_cmap_no_colormap(self):
"""Test that being unable to create a colormap raises an error."""
from satpy.enhancements import create_colormap
- self.assertRaises(ValueError, create_colormap, {})
+ with pytest.raises(ValueError):
+ create_colormap({})
def test_cmap_list(self):
"""Test that colors can be a list/tuple."""
@@ -406,15 +440,53 @@ def test_cmap_list(self):
]
values = [2, 4, 6, 8]
cmap = create_colormap({'colors': colors, 'color_scale': 1})
- self.assertEqual(cmap.colors.shape[0], 4)
+ assert cmap.colors.shape[0] == 4
np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 0)
- self.assertEqual(cmap.values[-1], 1.0)
+ assert cmap.values.shape[0] == 4
+ assert cmap.values[0] == 0
+ assert cmap.values[-1] == 1.0
cmap = create_colormap({'colors': colors, 'color_scale': 1, 'values': values})
- self.assertEqual(cmap.colors.shape[0], 4)
+ assert cmap.colors.shape[0] == 4
np.testing.assert_equal(cmap.colors[0], [0.0, 0.0, 1.0])
- self.assertEqual(cmap.values.shape[0], 4)
- self.assertEqual(cmap.values[0], 2)
- self.assertEqual(cmap.values[-1], 8)
+ assert cmap.values.shape[0] == 4
+ assert cmap.values[0] == 2
+ assert cmap.values[-1] == 8
+
+
+def test_on_separate_bands():
+ """Test the `on_separate_bands` decorator."""
+ def func(array, index, gain=2):
+ return xr.DataArray(np.ones(array.shape, dtype=array.dtype) * index * gain,
+ coords=array.coords, dims=array.dims, attrs=array.attrs)
+
+ separate_func = on_separate_bands(func)
+ arr = xr.DataArray(np.zeros((3, 10, 10)), dims=['bands', 'y', 'x'], coords={"bands": ["R", "G", "B"]})
+ assert separate_func(arr).shape == arr.shape
+ assert all(separate_func(arr, gain=1).values[:, 0, 0] == [0, 1, 2])
+
+
+def test_using_map_blocks():
+ """Test the `using_map_blocks` decorator."""
+ def func(np_array, block_info=None):
+ value = block_info[0]['chunk-location'][-1]
+ return np.ones(np_array.shape) * value
+
+ map_blocked_func = using_map_blocks(func)
+ arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x'])
+ res = map_blocked_func(arr)
+ assert res.shape == arr.shape
+ assert res[0, 0, 0].compute() != res[0, 9, 9].compute()
+
+
+def test_on_dask_array():
+ """Test the `on_dask_array` decorator."""
+ def func(dask_array):
+ if not isinstance(dask_array, da.core.Array):
+ pytest.fail("Array is not a dask array")
+ return dask_array
+
+ dask_func = on_dask_array(func)
+ arr = xr.DataArray(da.zeros((3, 10, 10), dtype=int, chunks=5), dims=['bands', 'y', 'x'])
+ res = dask_func(arr)
+ assert res.shape == arr.shape
diff --git a/satpy/tests/enhancement_tests/test_viirs.py b/satpy/tests/enhancement_tests/test_viirs.py
index 87d7ca29ba..5595266034 100644
--- a/satpy/tests/enhancement_tests/test_viirs.py
+++ b/satpy/tests/enhancement_tests/test_viirs.py
@@ -18,9 +18,12 @@
"""Unit testing for the VIIRS enhancement function."""
import unittest
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
+
+from .test_enhancements import run_and_check_enhancement
class TestVIIRSEnhancement(unittest.TestCase):
@@ -69,19 +72,5 @@ def test_viirs(self):
from satpy.enhancements.viirs import water_detection
expected = [[[1, 7, 8, 8, 8, 9, 10, 11, 14, 8],
[20, 23, 26, 10, 12, 15, 18, 21, 24, 27]]]
- self._test_enhancement(water_detection, self.da, expected,
- palettes=self.palette)
-
- def _test_enhancement(self, func, data, expected, **kwargs):
- from trollimage.xrimage import XRImage
-
- pre_attrs = data.attrs
- img = XRImage(data)
- func(img, **kwargs)
-
- self.assertIsInstance(img.data.data, da.Array)
- self.assertListEqual(sorted(pre_attrs.keys()),
- sorted(img.data.attrs.keys()),
- "DataArray attributes were not preserved")
-
- np.testing.assert_allclose(img.data.values, expected, atol=1.e-6, rtol=0)
+ run_and_check_enhancement(water_detection, self.da, expected,
+ palettes=self.palette)
diff --git a/satpy/tests/etc/composites/fake_sensor.yaml b/satpy/tests/etc/composites/fake_sensor.yaml
index c079dd104e..f49c1d3d79 100644
--- a/satpy/tests/etc/composites/fake_sensor.yaml
+++ b/satpy/tests/etc/composites/fake_sensor.yaml
@@ -241,3 +241,8 @@ composites:
prerequisites:
- name: ds13
modifiers: ['mod1', 'mod_opt_only']
+ comp_multi:
+ compositor: !!python/name:satpy.tests.utils.FakeCompositor
+ prerequisites:
+ - ds1
+ - ds4_b
diff --git a/satpy/tests/etc/readers/fake1.yaml b/satpy/tests/etc/readers/fake1.yaml
index 36d9608a3d..b34c7e1b34 100644
--- a/satpy/tests/etc/readers/fake1.yaml
+++ b/satpy/tests/etc/readers/fake1.yaml
@@ -39,7 +39,7 @@ datasets:
name: ds5
resolution:
250:
- file_type: fake_file_highres
+ file_type: fake_file1_highres
500:
file_type: fake_file1
1000:
@@ -111,7 +111,7 @@ file_types:
file_reader: !!python/name:satpy.tests.utils.FakeFileHandler
file_patterns: ['fake1_{file_idx:d}.txt']
sensor: fake_sensor
- fake_file_highres:
+ fake_file1_highres:
file_reader: !!python/name:satpy.tests.utils.FakeFileHandler
file_patterns: ['fake1_highres_{file_idx:d}.txt']
sensor: fake_sensor
diff --git a/satpy/tests/etc/readers/fake2_1ds.yaml b/satpy/tests/etc/readers/fake2_1ds.yaml
index 6889e28d73..2cd1912ea2 100644
--- a/satpy/tests/etc/readers/fake2_1ds.yaml
+++ b/satpy/tests/etc/readers/fake2_1ds.yaml
@@ -1,5 +1,5 @@
reader:
- name: fake1
+ name: fake2_1ds
description: Fake reader used for easier testing
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
sensors: [fake_sensor2]
@@ -13,4 +13,4 @@ file_types:
fake_file1:
file_reader: !!python/name:satpy.tests.utils.FakeFileHandler
file_patterns: ['fake2_1ds_{file_idx:d}.txt']
- sensor: fake_sensor
+ sensor: fake_sensor2
diff --git a/satpy/tests/etc/readers/fake2_3ds.yaml b/satpy/tests/etc/readers/fake2_3ds.yaml
new file mode 100644
index 0000000000..9d3d76f6f0
--- /dev/null
+++ b/satpy/tests/etc/readers/fake2_3ds.yaml
@@ -0,0 +1,27 @@
+reader:
+ name: fake2_3ds
+ description: Fake reader used for easier testing
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ sensors: [fake_sensor2]
+datasets:
+ lons:
+ name: lons
+ resolution: [250, 500, 1000]
+ standard_name: longitude
+ file_type: fake_file1
+ lats:
+ name: lats
+ resolution: [250, 500, 1000]
+ standard_name: latitude
+ file_type: fake_file1
+ ds2:
+ name: ds2
+ resolution: 250
+ calibration: "reflectance"
+ file_type: fake_file1
+ coordinates: [lons, lats]
+file_types:
+ fake_file1:
+ file_reader: !!python/name:satpy.tests.utils.FakeFileHandler
+ file_patterns: ['fake2_3ds_{file_idx:d}.txt']
+ sensor: fake_sensor2
diff --git a/satpy/tests/etc/readers/fake3.yaml b/satpy/tests/etc/readers/fake3.yaml
index f86d3bf406..fb8a6c17c4 100644
--- a/satpy/tests/etc/readers/fake3.yaml
+++ b/satpy/tests/etc/readers/fake3.yaml
@@ -1,8 +1,8 @@
reader:
- name: fake1
+ name: fake3
description: Fake reader used for easier testing
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
- sensors: [fake_sensor2]
+ sensors: [fake_sensor]
datasets:
duplicate1:
name: duplicate1
diff --git a/satpy/tests/etc/readers/fake4.yaml b/satpy/tests/etc/readers/fake4.yaml
new file mode 100644
index 0000000000..5ae87fcf73
--- /dev/null
+++ b/satpy/tests/etc/readers/fake4.yaml
@@ -0,0 +1,33 @@
+reader:
+ name: fake4
+ description: Fake reader used for easier testing
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+ sensors: [fake_sensor_4]
+datasets:
+ lons:
+ name: lons
+ resolution: [250, 500, 1000]
+ standard_name: longitude
+ file_type: fake_file4
+ lats:
+ name: lats
+ resolution: [250, 500, 1000]
+ standard_name: latitude
+ file_type: fake_file4
+ ds4_a:
+ name: ds4_a
+ resolution: 1000
+ wavelength: [0.1, 0.2, 0.3]
+ file_type: fake_file4
+ coordinates: [lons, lats]
+ ds4_b:
+ name: ds4_b
+ resolution: 250
+ wavelength: [0.4, 0.5, 0.6]
+ file_type: fake_file4
+ coordinates: [lons, lats]
+file_types:
+ fake_file4:
+ file_reader: !!python/name:satpy.tests.utils.FakeFileHandler
+ file_patterns: ['fake4_{file_idx:d}.txt']
+ sensor: fake_sensor_4
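+# Illustrative usage sketch (not part of the config); the file name is
+# hypothetical but matches the pattern above:
+#   from satpy import Scene
+#   scn = Scene(reader="fake4", filenames=["fake4_0.txt"])
+#   scn.load(["ds4_a", "ds4_b"])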
diff --git a/satpy/tests/features/feature-load.feature b/satpy/tests/features/feature-load.feature
index f6f756da76..62c3af3e43 100644
--- a/satpy/tests/features/feature-load.feature
+++ b/satpy/tests/features/feature-load.feature
@@ -35,4 +35,4 @@ Feature: Simple and intuitive scene loading (sc. 1)
Scenario: Accessing datasets by name prefers less modified datasets
Given datasets with the same name
When a dataset is retrieved by name
- Then the least modified version of the dataset is returned
\ No newline at end of file
+ Then the least modified version of the dataset is returned
diff --git a/satpy/tests/features/feature-real-load-process-write.feature b/satpy/tests/features/feature-real-load-process-write.feature
index ea043681d5..11b12f9174 100644
--- a/satpy/tests/features/feature-real-load-process-write.feature
+++ b/satpy/tests/features/feature-real-load-process-write.feature
@@ -66,11 +66,11 @@ Feature: Loading real data in many formats with the same command
| format | composite | area |
| avhrr_l1b_eps | overview | eurol |
- Examples: FCI FDHSI data
- | format | composite | area |
- | fci_l1c_fdhsi | overview | eurol |
- | fci_l1c_fdhsi | cloudtop | eurol |
- | fci_l1c_fdhsi | true_color | eurol |
+ Examples: FCI L1c data
+ | format | composite | area |
+ | fci_l1c_nc | overview | eurol |
+ | fci_l1c_nc | cloudtop | eurol |
+ | fci_l1c_nc | true_color | eurol |
Examples: GAC data
| format | composite | area |
diff --git a/satpy/tests/features/feature-save.feature b/satpy/tests/features/feature-save.feature
index 3e181c1164..2e5421b4f7 100644
--- a/satpy/tests/features/feature-save.feature
+++ b/satpy/tests/features/feature-save.feature
@@ -21,4 +21,3 @@ Feature: Simple and intuitive saving
Given a bunch of datasets are available
When the save_datasets command is called
Then a bunch of files should be saved on disk
-
diff --git a/satpy/tests/features/steps/steps-load.py b/satpy/tests/features/steps/steps-load.py
index f8de2b28fa..9dfe9eb9cc 100644
--- a/satpy/tests/features/steps/steps-load.py
+++ b/satpy/tests/features/steps/steps-load.py
@@ -18,9 +18,10 @@
"""Behaviour steps for loading."""
import os
-from behave import use_step_matcher, given, when, then
from urllib.request import urlopen
+from behave import given, then, use_step_matcher, when
+
use_step_matcher("re")
@@ -44,8 +45,9 @@ def step_impl_data_available(context):
@when(u'user loads the data without providing a config file')
def step_impl_user_loads_no_config(context):
"""Load the data without a config."""
- from satpy import Scene, find_files_and_readers
from datetime import datetime
+
+ from satpy import Scene, find_files_and_readers
os.chdir("/tmp/")
readers_files = find_files_and_readers(sensor='viirs',
start_time=datetime(2015, 3, 11, 11, 20),
@@ -58,12 +60,8 @@ def step_impl_user_loads_no_config(context):
@then(u'the data is available in a scene object')
def step_impl_data_available_in_scene(context):
"""Check that the data is available in the scene."""
- assert (context.scene["M02"] is not None)
- try:
- context.scene["M01"] is None
- raise AssertionError()
- except KeyError:
- pass
+ assert context.scene["M02"] is not None
+ assert context.scene.get("M01") is None
@when(u'some items are not available')
@@ -75,8 +73,9 @@ def step_impl_items_not_available(context):
@when(u'user wants to know what data is available')
def step_impl_user_checks_availability(context):
"""Check availability."""
- from satpy import Scene, find_files_and_readers
from datetime import datetime
+
+ from satpy import Scene, find_files_and_readers
os.chdir("/tmp/")
reader_files = find_files_and_readers(sensor="viirs",
start_time=datetime(2015, 3, 11, 11, 20),
@@ -94,8 +93,9 @@ def step_impl_available_datasets_are_returned(context):
@given("datasets with the same name")
def step_impl_datasets_with_same_name(context):
"""Datasets with the same name but different other ID parameters."""
- from satpy import Scene
from xarray import DataArray
+
+ from satpy import Scene
from satpy.tests.utils import make_dataid
scn = Scene()
scn[make_dataid(name='ds1', calibration='radiance')] = DataArray([[1, 2], [3, 4]])
@@ -118,4 +118,4 @@ def step_impl_dataset_retrieved_by_name(context):
@then("the least modified version of the dataset is returned")
def step_impl_least_modified_dataset_returned(context):
"""Check that the dataset should be one of the least modified datasets."""
- assert(len(context.returned_dataset.attrs['modifiers']) == 0)
+ assert len(context.returned_dataset.attrs['modifiers']) == 0
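+    # "Least modified" here means the variant whose ``modifiers`` tuple is
+    # empty, so plain name-based access (e.g. ``scn["ds1"]``) is expected to
+    # prefer the unmodified dataset over any modified variants.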
diff --git a/satpy/tests/features/steps/steps-real-load-process-write.py b/satpy/tests/features/steps/steps-real-load-process-write.py
index eaa6455d64..d719d397e4 100644
--- a/satpy/tests/features/steps/steps-real-load-process-write.py
+++ b/satpy/tests/features/steps/steps-real-load-process-write.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2018 Satpy developers
+# Copyright (c) 2018-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -15,17 +15,14 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy.  If not, see <http://www.gnu.org/licenses/>.
-"""Step for the real load-process-write tests.
-"""
+"""Step for the real load-process-write tests."""
-import os
import fnmatch
-
-from behave import given, when, then
-
+import os
from tempfile import NamedTemporaryFile
import numpy as np
+from behave import given, then, when
from PIL import Image
@@ -68,22 +65,24 @@ def assert_images_match(image1, image2, threshold=0.1):
def get_all_files(directory, pattern):
- """Find all files matching *pattern* under *directory*."""
+ """Find all files matching *pattern* under ``directory``."""
matches = []
- for root, dirnames, filenames in os.walk(directory):
+ for root, _, filenames in os.walk(directory):
for filename in fnmatch.filter(filenames, pattern):
matches.append(os.path.join(root, filename))
return matches
def before_all(context):
+ """Enable satpy debugging."""
if not context.config.log_capture:
from satpy.utils import debug_on
debug_on()
-@given(u'{dformat} data is available') # noqa
-def step_impl(context, dformat):
+@given(u'{dformat} data is available')
+def step_impl_input_files_exists(context, dformat):
+ """Check that input data exists on disk."""
data_path = os.path.join('test_data', dformat)
data_available = os.path.exists(data_path)
if not data_available:
@@ -93,8 +92,9 @@ def step_impl(context, dformat):
context.data_path = data_path
-@when(u'the user loads the {composite} composite') # noqa
-def step_impl(context, composite):
+@when(u'the user loads the {composite} composite')
+def step_impl_create_scene_and_load_single(context, composite):
+ """Create a Scene and load a single composite."""
from satpy import Scene
scn = Scene(reader=context.dformat,
filenames=get_all_files(os.path.join(context.data_path, 'data'),
@@ -104,8 +104,9 @@ def step_impl(context, composite):
context.composite = composite
-@when(u'the user resamples the data to {area}') # noqa
-def step_impl(context, area):
+@when(u'the user resamples the data to {area}')
+def step_impl_resample_scene(context, area):
+ """Resample the scene to an area or use the native resampler."""
if area != '-':
context.lscn = context.scn.resample(area)
else:
@@ -113,15 +114,17 @@ def step_impl(context, area):
context.area = area
-@when(u'the user saves the composite to disk') # noqa
-def step_impl(context):
+@when(u'the user saves the composite to disk')
+def step_impl_save_to_png(context):
+ """Call Scene.save_dataset to write a PNG image."""
with NamedTemporaryFile(suffix='.png', delete=False) as tmp_file:
context.lscn.save_dataset(context.composite, filename=tmp_file.name)
context.new_filename = tmp_file.name
-@then(u'the resulting image should match the reference image') # noqa
-def step_impl(context):
+@then(u'the resulting image should match the reference image')
+def step_impl_compare_two_png_images(context):
+ """Compare two PNG image files."""
if context.area == '-':
ref_filename = context.composite + ".png"
else:
diff --git a/satpy/tests/features/steps/steps-save.py b/satpy/tests/features/steps/steps-save.py
index 00b737c2b8..b42d8751a2 100644
--- a/satpy/tests/features/steps/steps-save.py
+++ b/satpy/tests/features/steps/steps-save.py
@@ -15,93 +15,116 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy.  If not, see <http://www.gnu.org/licenses/>.
-from behave import given, when, then, use_step_matcher
+"""Behave steps related to saving or showing datasets."""
-try:
- from unittest.mock import patch
-except ImportError:
- from mock import patch
+from unittest.mock import patch
+from behave import given, then, use_step_matcher, when
use_step_matcher("re")
-@given("a dataset is available") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
+@given("a dataset is available")
+def step_impl_create_scene_one_dataset(context):
+ """Create a Scene with a fake dataset for testing.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
- from satpy import Scene
from xarray import DataArray
+
+ from satpy import Scene
scn = Scene()
scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x'])
context.scene = scn
-@when("the show command is called") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
+@when("the show command is called")
+def step_impl_scene_show(context):
+ """Call the Scene.show method.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
with patch('trollimage.xrimage.XRImage.show') as mock_show:
context.scene.show("MyDataset")
mock_show.assert_called_once_with()
-@then("an image should pop up") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
- """
- pass
+@then("an image should pop up")
+def step_impl_image_pop_up(context):
+ """Check that a image window pops up (no-op currently).
+ Args:
+ context (behave.runner.Context): Test context
-@when("the save_dataset command is called") # noqa: F811
-def step_impl(context):
"""
- :type context: behave.runner.Context
+
+
+@when("the save_dataset command is called")
+def step_impl_save_dataset_to_png(context):
+ """Run Scene.save_dataset to create a PNG image.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
context.filename = "/tmp/test_dataset.png"
context.scene.save_dataset("MyDataset", context.filename)
-@then("a file should be saved on disk") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
+@then("a file should be saved on disk")
+def step_impl_file_exists_and_remove(context):
+ """Check that a file exists on disk and then remove it.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
import os
- assert(os.path.exists(context.filename))
+ assert os.path.exists(context.filename)
os.remove(context.filename)
-@given("a bunch of datasets are available") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
+@given("a bunch of datasets are available")
+def step_impl_create_scene_two_datasets(context):
+ """Create a Scene with two fake datasets for testing.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
- from satpy import Scene
from xarray import DataArray
+
+ from satpy import Scene
scn = Scene()
scn["MyDataset"] = DataArray([[1, 2], [3, 4]], dims=['y', 'x'])
scn["MyDataset2"] = DataArray([[5, 6], [7, 8]], dims=['y', 'x'])
context.scene = scn
-@when("the save_datasets command is called") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
+@when("the save_datasets command is called")
+def step_impl_save_datasets(context):
+ """Run Scene.save_datsets to create PNG images.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
context.scene.save_datasets(writer="simple_image", filename="{name}.png")
-@then("a bunch of files should be saved on disk") # noqa: F811
-def step_impl(context):
- """
- :type context: behave.runner.Context
+@then("a bunch of files should be saved on disk")
+def step_impl_check_two_pngs_exist(context):
+ """Check that two PNGs exist.
+
+ Args:
+ context (behave.runner.Context): Test context
+
"""
import os
for filename in ["MyDataset.png", "MyDataset2.png"]:
- assert(os.path.exists(filename))
+ assert os.path.exists(filename)
os.remove(filename)
diff --git a/satpy/tests/modifier_tests/test_angles.py b/satpy/tests/modifier_tests/test_angles.py
new file mode 100644
index 0000000000..8348d939e9
--- /dev/null
+++ b/satpy/tests/modifier_tests/test_angles.py
@@ -0,0 +1,353 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+"""Tests for the angles in modifiers."""
+import contextlib
+import warnings
+from copy import deepcopy
+from datetime import datetime, timedelta
+from glob import glob
+from typing import Optional, Union
+from unittest import mock
+
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition, StackedAreaDefinition
+
+import satpy
+from satpy.utils import PerformanceWarning
+
+
+def _angle_cache_area_def():
+ area = AreaDefinition(
+ "test", "", "",
+ {"proj": "merc"},
+ 5, 5,
+ (-2500, -2500, 2500, 2500),
+ )
+ return area
+
+
+def _angle_cache_stacked_area_def():
+ area1 = AreaDefinition(
+ "test", "", "",
+ {"proj": "merc"},
+ 5, 2,
+ (2500, 500, 7500, 2500),
+ )
+ area2 = AreaDefinition(
+ "test", "", "",
+ {"proj": "merc"},
+ 5, 3,
+ (2500, -2500, 7500, 500),
+ )
+ return StackedAreaDefinition(area1, area2)
+
+
+def _get_angle_test_data(area_def: Optional[Union[AreaDefinition, StackedAreaDefinition]] = None,
+ chunks: Optional[Union[int, tuple]] = 2,
+ shape: tuple = (5, 5),
+                         dims: Optional[tuple] = None,
+ ) -> xr.DataArray:
+ if area_def is None:
+ area_def = _angle_cache_area_def()
+ orb_params = {
+ "satellite_nominal_altitude": 12345678,
+ "satellite_nominal_longitude": 10.0,
+ "satellite_nominal_latitude": 0.0,
+ }
+ stime = datetime(2020, 1, 1, 12, 0, 0)
+ data = da.zeros(shape, chunks=chunks)
+ vis = xr.DataArray(data,
+ dims=dims,
+ attrs={
+ 'area': area_def,
+ 'start_time': stime,
+ 'orbital_parameters': orb_params,
+ })
+ return vis
+
+
+def _get_stacked_angle_test_data():
+ return _get_angle_test_data(area_def=_angle_cache_stacked_area_def(),
+ chunks=(5, (2, 2, 1)))
+
+
+def _get_angle_test_data_rgb():
+ return _get_angle_test_data(shape=(5, 5, 3), chunks=((2, 2, 1), (2, 2, 1), (1, 1, 1)),
+ dims=("y", "x", "bands"))
+
+
+def _get_angle_test_data_rgb_nodims():
+ return _get_angle_test_data(shape=(3, 5, 5), chunks=((1, 1, 1), (2, 2, 1), (2, 2, 1)))
+
+
+def _get_angle_test_data_odd_chunks():
+ return _get_angle_test_data(chunks=((2, 1, 2), (1, 1, 2, 1)))
+
+
+def _similar_sat_pos_datetime(orig_data, lon_offset=0.04):
+ # change data slightly
+ new_data = orig_data.copy()
+ old_lon = new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"]
+ new_data.attrs["orbital_parameters"]["satellite_nominal_longitude"] = old_lon + lon_offset
+ new_data.attrs["start_time"] = new_data.attrs["start_time"] + timedelta(hours=36)
+ return new_data
+
+
+def _diff_sat_pos_datetime(orig_data):
+ return _similar_sat_pos_datetime(orig_data, lon_offset=0.05)
+
+
+def _glob_reversed(pat):
+ """Behave like glob but force results to be in the wrong order."""
+ return sorted(glob(pat), reverse=True)
+
+
+@contextlib.contextmanager
+def _mock_glob_if(mock_glob):
+ if mock_glob:
+ with mock.patch("satpy.modifiers.angles.glob", _glob_reversed):
+ yield
+ else:
+ yield
+
+
+def _assert_allclose_if(expect_equal, arr1, arr2):
+ if not expect_equal:
+ pytest.raises(AssertionError, np.testing.assert_allclose, arr1, arr2)
+ else:
+ np.testing.assert_allclose(arr1, arr2)
+
+
+class TestAngleGeneration:
+ """Test the angle generation utility functions."""
+
+ @pytest.mark.parametrize(
+ ("input_func", "exp_calls"),
+ [
+ (_get_angle_test_data, 9),
+ (_get_stacked_angle_test_data, 3),
+ (_get_angle_test_data_rgb, 9),
+ (_get_angle_test_data_rgb_nodims, 9),
+ ],
+ )
+ def test_get_angles(self, input_func, exp_calls):
+ """Test sun and satellite angle calculation."""
+ from satpy.modifiers.angles import get_angles
+ data = input_func()
+
+ from pyorbital.orbital import get_observer_look
+ with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol:
+ angles = get_angles(data)
+ assert all(isinstance(x, xr.DataArray) for x in angles)
+ da.compute(angles)
+
+ # get_observer_look should have been called once per array chunk
+ assert gol.call_count == exp_calls
+        # Check arguments of the get_observer_look() call, especially the altitude
+ # unit conversion from meters to kilometers
+ args = gol.call_args[0]
+ assert args[:4] == (10.0, 0.0, 12345.678, data.attrs["start_time"])
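+        # For reference: the default 5x5 input with chunks=2 is split into
+        # 3x3 = 9 dask blocks (hence 9 expected calls), while the stacked-area
+        # data chunked as (5, (2, 2, 1)) has 1x3 = 3 blocks.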
+
+ @pytest.mark.parametrize("forced_preference", ["actual", "nadir"])
+ def test_get_angles_satpos_preference(self, forced_preference):
+ """Test that 'actual' satellite position is used for generating sensor angles."""
+ from satpy.modifiers.angles import get_angles
+
+ input_data1 = _get_angle_test_data()
+ # add additional satellite position metadata
+ input_data1.attrs["orbital_parameters"]["nadir_longitude"] = 9.0
+ input_data1.attrs["orbital_parameters"]["nadir_latitude"] = 0.01
+ input_data1.attrs["orbital_parameters"]["satellite_actual_longitude"] = 9.5
+ input_data1.attrs["orbital_parameters"]["satellite_actual_latitude"] = 0.005
+ input_data1.attrs["orbital_parameters"]["satellite_actual_altitude"] = 12345679
+ input_data2 = input_data1.copy(deep=True)
+ input_data2.attrs = deepcopy(input_data1.attrs)
+ input_data2.attrs["orbital_parameters"]["nadir_longitude"] = 9.1
+ input_data2.attrs["orbital_parameters"]["nadir_latitude"] = 0.02
+ input_data2.attrs["orbital_parameters"]["satellite_actual_longitude"] = 9.5
+ input_data2.attrs["orbital_parameters"]["satellite_actual_latitude"] = 0.005
+ input_data2.attrs["orbital_parameters"]["satellite_actual_altitude"] = 12345679
+
+ from pyorbital.orbital import get_observer_look
+ with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \
+ satpy.config.set(sensor_angles_position_preference=forced_preference):
+ angles1 = get_angles(input_data1)
+ da.compute(angles1)
+ angles2 = get_angles(input_data2)
+ da.compute(angles2)
+
+ # get_observer_look should have been called once per array chunk
+ assert gol.call_count == input_data1.data.blocks.size * 2
+ if forced_preference == "actual":
+ exp_call = mock.call(9.5, 0.005, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0)
+ all_same_calls = [exp_call] * gol.call_count
+ gol.assert_has_calls(all_same_calls)
+ # the dask arrays should have the same name to prove they are the same computation
+ for angle_arr1, angle_arr2 in zip(angles1, angles2):
+ assert angle_arr1.data.name == angle_arr2.data.name
+ else:
+ # nadir 1
+ gol.assert_any_call(9.0, 0.01, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0)
+ # nadir 2
+ gol.assert_any_call(9.1, 0.02, 12345.679, input_data1.attrs["start_time"], mock.ANY, mock.ANY, 0)
+
+ @pytest.mark.parametrize("force_bad_glob", [False, True])
+ @pytest.mark.parametrize(
+ ("input2_func", "exp_equal_sun", "exp_num_zarr"),
+ [
+ (lambda x: x, True, 4),
+ (_similar_sat_pos_datetime, False, 4),
+ (_diff_sat_pos_datetime, False, 6),
+ ]
+ )
+ @pytest.mark.parametrize(
+ ("input_func", "num_normalized_chunks", "exp_zarr_chunks"),
+ [
+ (_get_angle_test_data, 9, ((2, 2, 1), (2, 2, 1))),
+ (_get_stacked_angle_test_data, 3, ((5,), (2, 2, 1))),
+ (_get_angle_test_data_odd_chunks, 9, ((2, 1, 2), (1, 1, 2, 1))),
+ (_get_angle_test_data_rgb, 9, ((2, 2, 1), (2, 2, 1))),
+ (_get_angle_test_data_rgb_nodims, 9, ((2, 2, 1), (2, 2, 1))),
+ ])
+ def test_cache_get_angles(
+ self,
+ input_func, num_normalized_chunks, exp_zarr_chunks,
+ input2_func, exp_equal_sun, exp_num_zarr,
+ force_bad_glob, tmp_path):
+ """Test get_angles when caching is enabled."""
+ from satpy.modifiers.angles import STATIC_EARTH_INERTIAL_DATETIME, get_angles
+
+        # Prepare input data
+ data = input_func()
+ additional_cache = exp_num_zarr > 4
+
+ # Compute angles
+ from pyorbital.orbital import get_observer_look
+ with mock.patch("satpy.modifiers.angles.get_observer_look", wraps=get_observer_look) as gol, \
+ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=str(tmp_path)), \
+ warnings.catch_warnings(record=True) as caught_warnings:
+ res = get_angles(data)
+ self._check_cached_result(res, exp_zarr_chunks)
+
+ # call again, should be cached
+ new_data = input2_func(data)
+ with _mock_glob_if(force_bad_glob):
+ res2 = get_angles(new_data)
+ self._check_cached_result(res2, exp_zarr_chunks)
+
+ res_numpy, res2_numpy = da.compute(res, res2)
+ for r1, r2 in zip(res_numpy[:2], res2_numpy[:2]):
+ _assert_allclose_if(not additional_cache, r1, r2)
+ for r1, r2 in zip(res_numpy[2:], res2_numpy[2:]):
+ _assert_allclose_if(exp_equal_sun, r1, r2)
+
+ self._check_cache_and_clear(tmp_path, exp_num_zarr)
+
+ if "odd_chunks" in input_func.__name__:
+ assert any(w.category is PerformanceWarning for w in caught_warnings)
+ else:
+ assert not any(w.category is PerformanceWarning for w in caught_warnings)
+ assert gol.call_count == num_normalized_chunks * (int(additional_cache) + 1)
+ args = gol.call_args_list[0][0]
+ assert args[:4] == (10.0, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME)
+ exp_sat_lon = 10.1 if additional_cache else 10.0
+ args = gol.call_args_list[-1][0]
+ assert args[:4] == (exp_sat_lon, 0.0, 12345.678, STATIC_EARTH_INERTIAL_DATETIME)
+
+ @staticmethod
+ def _check_cached_result(results, exp_zarr_chunks):
+ assert all(isinstance(x, xr.DataArray) for x in results)
+ # output chunks should be consistent
+ for angle_data_arr in results:
+ assert angle_data_arr.chunks == exp_zarr_chunks
+
+ @staticmethod
+ def _check_cache_and_clear(tmp_path, exp_num_zarr):
+ from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, _get_valid_lonlats
+ zarr_dirs = glob(str(tmp_path / "*.zarr"))
+        assert len(zarr_dirs) == exp_num_zarr  # base case: two for lon/lat, one each for satellite azimuth and zenith
+
+ _get_valid_lonlats.cache_clear()
+ _get_sensor_angles_from_sat_pos.cache_clear()
+ zarr_dirs = glob(str(tmp_path / "*.zarr"))
+ assert len(zarr_dirs) == 0
+
+ def test_cached_no_chunks_fails(self, tmp_path):
+ """Test that trying to pass non-dask arrays and no chunks fails."""
+ from satpy.modifiers.angles import _sanitize_args_with_chunks, cache_to_zarr_if
+
+ @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks)
+ def _fake_func(data, tuple_arg, chunks):
+ return da.from_array(data)
+
+ data = list(range(5))
+ with pytest.raises(RuntimeError), \
+ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)):
+ _fake_func(data, (1, 2, 3), 5)
+
+ def test_cached_result_numpy_fails(self, tmp_path):
+ """Test that trying to cache with non-dask arrays fails."""
+ from satpy.modifiers.angles import _sanitize_args_with_chunks, cache_to_zarr_if
+
+ @cache_to_zarr_if("cache_lonlats", sanitize_args_func=_sanitize_args_with_chunks)
+ def _fake_func(shape, chunks):
+ return np.zeros(shape)
+
+ with pytest.raises(ValueError), \
+ satpy.config.set(cache_lonlats=True, cache_dir=str(tmp_path)):
+ _fake_func((5, 5), ((5,), (5,)))
+
+ def test_no_cache_dir_fails(self, tmp_path):
+ """Test that 'cache_dir' not being set fails."""
+ from satpy.modifiers.angles import _get_sensor_angles_from_sat_pos, get_angles
+ data = _get_angle_test_data()
+ with pytest.raises(RuntimeError), \
+ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=None):
+ get_angles(data)
+ with pytest.raises(RuntimeError), \
+ satpy.config.set(cache_lonlats=True, cache_sensor_angles=True, cache_dir=None):
+ _get_sensor_angles_from_sat_pos.cache_clear()
+
+ def test_relative_azimuth_calculation(self):
+ """Test relative azimuth calculation."""
+ from satpy.modifiers.angles import compute_relative_azimuth
+
+ saa = xr.DataArray(np.array([-120, 40., 0.04, 179.4, 94.2, 12.1]))
+ vaa = xr.DataArray(np.array([60., 57.7, 175.1, 234.18, 355.4, 12.1]))
+
+ expected_raa = xr.DataArray(np.array([180., 17.7, 175.06, 54.78, 98.8, 0.]))
+
+ raa = compute_relative_azimuth(vaa, saa)
+ assert isinstance(raa, xr.DataArray)
+ np.testing.assert_allclose(expected_raa, raa)
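+        # The expected values are consistent with taking abs(vaa - saa) and
+        # folding differences above 180 degrees back into [0, 180], e.g.
+        # abs(-120 - 60) = 180 and abs(355.4 - 94.2) = 261.2 -> 360 - 261.2 = 98.8.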
+
+ def test_solazi_correction(self):
+ """Test that solar azimuth angles are corrected into the right range."""
+ from datetime import datetime
+
+ from satpy.modifiers.angles import _get_sun_azimuth_ndarray
+
+ lats = np.array([-80, 40, 0, 40, 80])
+ lons = np.array([-80, 40, 0, 40, 80])
+
+ dt = datetime(2022, 1, 5, 12, 50, 0)
+
+ azi = _get_sun_azimuth_ndarray(lats, lons, dt)
+
+ assert np.all(azi > 0)
diff --git a/satpy/tests/modifier_tests/test_crefl.py b/satpy/tests/modifier_tests/test_crefl.py
index a70d088417..5ab156be8c 100644
--- a/satpy/tests/modifier_tests/test_crefl.py
+++ b/satpy/tests/modifier_tests/test_crefl.py
@@ -13,67 +13,75 @@
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
"""Tests for the CREFL ReflectanceCorrector modifier."""
-import unittest
+from contextlib import contextmanager
+from datetime import datetime
from unittest import mock
import numpy as np
import pytest
+import xarray as xr
from dask import array as da
from pyresample.geometry import AreaDefinition
+from ..utils import assert_maximum_dask_computes
-class TestViirsReflectanceCorrectorAnglesTest(unittest.TestCase):
- """Tests for the VIIRS/MODIS Corrected Reflectance modifier handling angles."""
-
- def setUp(self):
- """Patch in-class imports."""
- self.astronomy = mock.MagicMock()
- self.orbital = mock.MagicMock()
- modules = {
- 'pyorbital.astronomy': self.astronomy,
- 'pyorbital.orbital': self.orbital,
- }
- self.module_patcher = mock.patch.dict('sys.modules', modules)
- self.module_patcher.start()
-
- def tearDown(self):
- """Unpatch in-class imports."""
- self.module_patcher.stop()
-
- @mock.patch('satpy.modifiers._crefl.get_satpos')
- def test_get_angles(self, get_satpos):
- """Test sun and satellite angle calculation."""
- import numpy as np
- import dask.array as da
- from satpy.modifiers._crefl import ReflectanceCorrector
- # Patch methods
- get_satpos.return_value = 'sat_lon', 'sat_lat', 12345678
- self.orbital.get_observer_look.return_value = 0, 0
- self.astronomy.get_alt_az.return_value = 0, 0
- area = mock.MagicMock()
- lons = np.zeros((5, 5))
- lons[1, 1] = np.inf
- lons = da.from_array(lons, chunks=5)
- lats = np.zeros((5, 5))
- lats[1, 1] = np.inf
- lats = da.from_array(lats, chunks=5)
- area.get_lonlats.return_value = (lons, lats)
- vis = mock.MagicMock(attrs={'area': area,
- 'start_time': 'start_time'})
-
- # Compute angles
- psp = ReflectanceCorrector(name='dummy')
- psp.get_angles(vis)
-
- # Check arguments of get_orbserver_look() call, especially the altitude
- # unit conversion from meters to kilometers
- self.orbital.get_observer_look.assert_called_once()
- args = self.orbital.get_observer_look.call_args[0]
- self.assertEqual(args[:4], ('sat_lon', 'sat_lat', 12345.678, 'start_time'))
- self.assertIsInstance(args[4], da.Array)
- self.assertIsInstance(args[5], da.Array)
- self.assertEqual(args[6], 0)
+@contextmanager
+def mock_cmgdem(tmpdir, url):
+ """Create fake file representing CMGDEM.hdf."""
+ yield from _mock_and_create_dem_file(tmpdir, url, "averaged elevation", fill_value=-9999)
+
+
+@contextmanager
+def mock_tbase(tmpdir, url):
+ """Create fake file representing tbase.hdf."""
+ yield from _mock_and_create_dem_file(tmpdir, url, "Elevation")
+
+
+def _mock_and_create_dem_file(tmpdir, url, var_name, fill_value=None):
+ if not url:
+ yield None
+ return
+
+ rmock_obj, dem_fn = _mock_dem_retrieve(tmpdir, url)
+ _create_fake_dem_file(dem_fn, var_name, fill_value)
+
+ try:
+ yield rmock_obj
+ finally:
+ rmock_obj.stop()
+
+
+def _mock_dem_retrieve(tmpdir, url):
+ rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve')
+ rmock = rmock_obj.start()
+ dem_fn = str(tmpdir.join(url))
+ rmock.return_value = dem_fn
+ return rmock_obj, dem_fn
+
+
+def _create_fake_dem_file(dem_fn, var_name, fill_value):
+ from pyhdf.SD import SD, SDC
+ h = SD(dem_fn, SDC.WRITE | SDC.CREATE)
+ dem_var = h.create(var_name, SDC.INT16, (10, 10))
+ dem_var[:] = np.zeros((10, 10), dtype=np.int16)
+ if fill_value is not None:
+ dem_var.setfillvalue(fill_value)
+ h.end()
+
+
+def _make_viirs_xarray(data, area, name, standard_name, wavelength=None, units='degrees', calibration=None):
+ return xr.DataArray(data, dims=('y', 'x'),
+ attrs={
+ 'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength,
+ 'modifiers': None, 'calibration': calibration,
+ 'resolution': 371, 'name': name,
+ 'standard_name': standard_name, 'platform_name': 'Suomi-NPP',
+ 'polarization': None, 'sensor': 'viirs', 'units': units,
+ 'start_time': datetime(2012, 2, 25, 18, 1, 24, 570942),
+ 'end_time': datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area,
+ 'ancillary_variables': []
+ })
class TestReflectanceCorrectorModifier:
@@ -82,8 +90,8 @@ class TestReflectanceCorrectorModifier:
@staticmethod
def data_area_ref_corrector():
"""Create test area definition and data."""
- rows = 5
- cols = 10
+ rows = 3
+ cols = 5
area = AreaDefinition(
'some_area_name', 'On-the-fly area', 'geosabii',
{'a': '6378137.0', 'b': '6356752.31414', 'h': '35786023.0', 'lon_0': '-89.5', 'proj': 'geos', 'sweep': 'x',
@@ -91,32 +99,50 @@ def data_area_ref_corrector():
cols, rows,
(-5434894.954752679, -5434894.964451744, 5434894.964451744, 5434894.954752679))
- dnb = np.zeros((rows, cols)) + 25
- dnb[3, :] += 25
- dnb[4:, :] += 50
- dnb = da.from_array(dnb, chunks=100)
- return area, dnb
-
- def test_reflectance_corrector_abi(self):
+ data = np.zeros((rows, cols)) + 25
+ data[1, :] += 25
+ data[2, :] += 50
+ data = da.from_array(data, chunks=2)
+ return area, data
+
+ @pytest.mark.parametrize(
+ ("name", "wavelength", "resolution", "exp_mean", "exp_unique"),
+ [
+ ("C01", (0.45, 0.47, 0.49), 1000, 44.757951,
+ np.array([12.83774603, 14.38767557, 17.24258084, 41.87806142, 44.42472192, 47.89958451,
+ 48.23343427, 48.53847386, 71.52916035, 72.26078684, 73.10523784])),
+ ("C02", (0.59, 0.64, 0.69), 500, 51.4901,
+ np.array([23.69999579, 24.00407203, 24.49390685, 51.4304448, 51.64271324, 51.70519738,
+ 51.70942859, 51.76064747, 78.37182815, 78.77078522, 78.80199923])),
+ ("C03", (0.8455, 0.865, 0.8845), 1000, 50.7243,
+ np.array([24.78444631, 24.86790679, 24.99481254, 50.69670516, 50.72983327, 50.73601728,
+ 50.75685498, 50.83136276, 76.39973287, 76.5714688, 76.59856607])),
+ # ("C04", (1.3705, 1.378, 1.3855), 2000, 55.973458829136796, None),
+ ("C05", (1.58, 1.61, 1.64), 1000, 52.7231,
+ np.array([26.26568157, 26.43230852, 26.48936244, 52.00527783, 52.13043172, 52.20176747,
+ 53.01505657, 53.29017112, 78.93907987, 79.49089239, 79.69387535])),
+ ("C06", (2.225, 2.25, 2.275), 2000, 55.9735,
+ np.array([27.82291562, 28.2268102, 28.37246323, 54.33639308, 54.61451818, 54.77543748,
+ 56.62284858, 57.27288821, 83.57235975, 84.81324822, 85.27816457])),
+ ]
+ )
+ def test_reflectance_corrector_abi(self, name, wavelength, resolution, exp_mean, exp_unique):
"""Test ReflectanceCorrector modifier with ABI data."""
- import xarray as xr
- import dask.array as da
- import numpy as np
from satpy.modifiers._crefl import ReflectanceCorrector
from satpy.tests.utils import make_dsq
ref_cor = ReflectanceCorrector(optional_prerequisites=[
make_dsq(name='satellite_azimuth_angle'),
make_dsq(name='satellite_zenith_angle'),
make_dsq(name='solar_azimuth_angle'),
- make_dsq(name='solar_zenith_angle')], name='C01', prerequisites=[],
- wavelength=(0.45, 0.47, 0.49), resolution=1000, calibration='reflectance',
+ make_dsq(name='solar_zenith_angle')], name=name, prerequisites=[],
+ wavelength=wavelength, resolution=resolution, calibration='reflectance',
modifiers=('sunz_corrected', 'rayleigh_corrected_crefl',), sensor='abi')
assert ref_cor.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl')
assert ref_cor.attrs['calibration'] == 'reflectance'
- assert ref_cor.attrs['wavelength'] == (0.45, 0.47, 0.49)
- assert ref_cor.attrs['name'] == 'C01'
- assert ref_cor.attrs['resolution'] == 1000
+ assert ref_cor.attrs['wavelength'] == wavelength
+ assert ref_cor.attrs['name'] == name
+ assert ref_cor.attrs['resolution'] == resolution
assert ref_cor.attrs['sensor'] == 'abi'
assert ref_cor.attrs['prerequisites'] == []
assert ref_cor.attrs['optional_prerequisites'] == [
@@ -129,56 +155,49 @@ def test_reflectance_corrector_abi(self):
c01 = xr.DataArray(dnb,
dims=('y', 'x'),
attrs={
- 'satellite_longitude': -89.5, 'satellite_latitude': 0.0,
- 'satellite_altitude': 35786023.4375, 'platform_name': 'GOES-16',
- 'calibration': 'reflectance', 'units': '%', 'wavelength': (0.45, 0.47, 0.49),
- 'name': 'C01', 'resolution': 1000, 'sensor': 'abi',
+ 'platform_name': 'GOES-16',
+ 'calibration': 'reflectance', 'units': '%', 'wavelength': wavelength,
+ 'name': name, 'resolution': resolution, 'sensor': 'abi',
'start_time': '2017-09-20 17:30:40.800000', 'end_time': '2017-09-20 17:41:17.500000',
- 'area': area, 'ancillary_variables': []
+ 'area': area, 'ancillary_variables': [],
+ 'orbital_parameters': {
+ 'satellite_nominal_longitude': -89.5,
+ 'satellite_nominal_latitude': 0.0,
+ 'satellite_nominal_altitude': 35786023.4375,
+ },
})
- res = ref_cor([c01], [])
+ with assert_maximum_dask_computes(0):
+ res = ref_cor([c01], [])
assert isinstance(res, xr.DataArray)
assert isinstance(res.data, da.Array)
- assert res.attrs['satellite_longitude'] == -89.5
- assert res.attrs['satellite_latitude'] == 0.0
- assert res.attrs['satellite_altitude'] == 35786023.4375
assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl')
assert res.attrs['platform_name'] == 'GOES-16'
assert res.attrs['calibration'] == 'reflectance'
assert res.attrs['units'] == '%'
- assert res.attrs['wavelength'] == (0.45, 0.47, 0.49)
- assert res.attrs['name'] == 'C01'
- assert res.attrs['resolution'] == 1000
+ assert res.attrs['wavelength'] == wavelength
+ assert res.attrs['name'] == name
+ assert res.attrs['resolution'] == resolution
assert res.attrs['sensor'] == 'abi'
assert res.attrs['start_time'] == '2017-09-20 17:30:40.800000'
assert res.attrs['end_time'] == '2017-09-20 17:41:17.500000'
assert res.attrs['area'] == area
assert res.attrs['ancillary_variables'] == []
data = res.values
- assert abs(np.nanmean(data) - 26.00760944144745) < 1e-10
- assert data.shape == (5, 10)
unique = np.unique(data[~np.isnan(data)])
- np.testing.assert_allclose(unique, [-1.0, 4.210745457958135, 6.7833906076177595, 8.730371329824473,
- 10.286627569545209, 11.744159436709374, 12.20226097829902,
- 13.501444598985305, 15.344399223932212, 17.173329483996515,
- 17.28798660754271, 18.29594550575925, 19.076835059905125,
- 19.288331720959864, 19.77043407084455, 19.887082168377006,
- 20.091028778326375, 20.230341149334617, 20.457671064690196,
- 20.82686905639114, 21.021094816441195, 21.129963777952124,
- 41.601857910095575, 43.963919057675504,
- 46.21672174361075, 46.972099490462085, 47.497072794632835,
- 47.80393007974336, 47.956765988770385, 48.043025685032106,
- 51.909142813383916, 58.8234273736508, 68.84706145641482, 69.91085190887961,
- 71.10179768327806, 71.33161009169649])
-
- @pytest.mark.parametrize('url', [None, 'CMGDEM.hdf'])
- def test_reflectance_corrector_viirs(self, tmpdir, url):
+ np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5)
+ assert data.shape == (3, 5)
+ np.testing.assert_allclose(unique, exp_unique, rtol=1e-5)
+
+ @pytest.mark.parametrize(
+ 'url,dem_mock_cm,dem_sds',
+ [
+ (None, mock_cmgdem, "average elevation"),
+ ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"),
+ ("tbase.hdf", mock_tbase, "Elevation"),
+ ])
+ def test_reflectance_corrector_viirs(self, tmpdir, url, dem_mock_cm, dem_sds):
"""Test ReflectanceCorrector modifier with VIIRS data."""
- import xarray as xr
- import dask.array as da
- import numpy as np
- import datetime
from satpy.modifiers._crefl import ReflectanceCorrector
from satpy.tests.utils import make_dsq
@@ -196,7 +215,9 @@ def test_reflectance_corrector_viirs(self, tmpdir, url):
calibration='reflectance',
modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'),
sensor='viirs',
- url=url)
+ url=url,
+ dem_sds=dem_sds,
+ )
assert ref_cor.attrs['modifiers'] == ('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband')
assert ref_cor.attrs['calibration'] == 'reflectance'
@@ -211,32 +232,17 @@ def test_reflectance_corrector_viirs(self, tmpdir, url):
make_dsq(name='solar_azimuth_angle'),
make_dsq(name='solar_zenith_angle')]
- area, dnb = self.data_area_ref_corrector()
-
- def make_xarray(name, standard_name, wavelength=None, units='degrees', calibration=None):
- return xr.DataArray(dnb, dims=('y', 'x'),
- attrs={
- 'start_orbit': 1708, 'end_orbit': 1708, 'wavelength': wavelength, 'level': None,
- 'modifiers': None, 'calibration': calibration,
- 'resolution': 371, 'name': name,
- 'standard_name': standard_name, 'platform_name': 'Suomi-NPP',
- 'polarization': None, 'sensor': 'viirs', 'units': units,
- 'start_time': datetime.datetime(2012, 2, 25, 18, 1, 24, 570942),
- 'end_time': datetime.datetime(2012, 2, 25, 18, 11, 21, 175760), 'area': area,
- 'ancillary_variables': []
- })
-
- c01 = make_xarray('I01', 'toa_bidirectional_reflectance',
- wavelength=(0.6, 0.64, 0.68), units='%',
- calibration='reflectance')
- c02 = make_xarray('satellite_azimuth_angle', 'sensor_azimuth_angle')
- c03 = make_xarray('satellite_zenith_angle', 'sensor_zenith_angle')
- c04 = make_xarray('solar_azimuth_angle', 'solar_azimuth_angle')
- c05 = make_xarray('solar_zenith_angle', 'solar_zenith_angle')
+ area, data = self.data_area_ref_corrector()
+ c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance',
+ wavelength=(0.6, 0.64, 0.68), units='%',
+ calibration='reflectance')
+ c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle')
+ c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle')
+ c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle')
+ c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle')
- rmock_obj = self._start_dem_mock(tmpdir, url)
- res = ref_cor([c01], [c02, c03, c04, c05])
- self._stop_dem_mock(rmock_obj)
+ with dem_mock_cm(tmpdir, url), assert_maximum_dask_computes(0):
+ res = ref_cor([c01], [c02, c03, c04, c05])
assert isinstance(res, xr.DataArray)
assert isinstance(res.data, da.Array)
@@ -249,22 +255,18 @@ def make_xarray(name, standard_name, wavelength=None, units='degrees', calibrati
assert res.attrs['platform_name'] == 'Suomi-NPP'
assert res.attrs['sensor'] == 'viirs'
assert res.attrs['units'] == '%'
- assert res.attrs['start_time'] == datetime.datetime(2012, 2, 25, 18, 1, 24, 570942)
- assert res.attrs['end_time'] == datetime.datetime(2012, 2, 25, 18, 11, 21, 175760)
+ assert res.attrs['start_time'] == datetime(2012, 2, 25, 18, 1, 24, 570942)
+ assert res.attrs['end_time'] == datetime(2012, 2, 25, 18, 11, 21, 175760)
assert res.attrs['area'] == area
assert res.attrs['ancillary_variables'] == []
data = res.values
- assert abs(np.mean(data) - 40.7578684169142) < 1e-10
- assert data.shape == (5, 10)
+ assert abs(np.mean(data) - 51.12750267805715) < 1e-6
+ assert data.shape == (3, 5)
unique = np.unique(data)
- np.testing.assert_allclose(unique, [25.20341702519979, 52.38819447051263, 75.79089653845898])
+ np.testing.assert_allclose(unique, [25.20341703, 52.38819447, 75.79089654])
def test_reflectance_corrector_modis(self):
"""Test ReflectanceCorrector modifier with MODIS data."""
- import xarray as xr
- import dask.array as da
- import numpy as np
- import datetime
from satpy.modifiers._crefl import ReflectanceCorrector
from satpy.tests.utils import make_dsq
sataa_did = make_dsq(name='satellite_azimuth_angle')
@@ -290,22 +292,21 @@ def test_reflectance_corrector_modis(self):
area, dnb = self.data_area_ref_corrector()
- def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000,
- file_type='hdf_eos_geo'):
+ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1000):
return xr.DataArray(dnb,
dims=('y', 'x'),
attrs={
'wavelength': wavelength, 'level': None, 'modifiers': modifiers,
- 'calibration': calibration, 'resolution': resolution, 'file_type': file_type,
+ 'calibration': calibration, 'resolution': resolution,
'name': name, 'coordinates': ['longitude', 'latitude'],
'platform_name': 'EOS-Aqua', 'polarization': None, 'sensor': 'modis',
- 'units': '%', 'start_time': datetime.datetime(2012, 8, 13, 18, 46, 1, 439838),
- 'end_time': datetime.datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area,
+ 'units': '%', 'start_time': datetime(2012, 8, 13, 18, 46, 1, 439838),
+ 'end_time': datetime(2012, 8, 13, 18, 57, 47, 746296), 'area': area,
'ancillary_variables': []
})
c01 = make_xarray('1', 'reflectance', wavelength=(0.62, 0.645, 0.67), modifiers='sunz_corrected',
- resolution=500, file_type='hdf_eos_data_500m')
+ resolution=500)
c02 = make_xarray('satellite_azimuth_angle', None)
c03 = make_xarray('satellite_zenith_angle', None)
c04 = make_xarray('solar_azimuth_angle', None)
@@ -318,21 +319,19 @@ def make_xarray(name, calibration, wavelength=None, modifiers=None, resolution=1
assert res.attrs['modifiers'] == ('sunz_corrected', 'rayleigh_corrected_crefl',)
assert res.attrs['calibration'] == 'reflectance'
assert res.attrs['resolution'] == 500
- assert res.attrs['file_type'] == 'hdf_eos_data_500m'
assert res.attrs['name'] == '1'
assert res.attrs['platform_name'] == 'EOS-Aqua'
assert res.attrs['sensor'] == 'modis'
assert res.attrs['units'] == '%'
- assert res.attrs['start_time'] == datetime.datetime(2012, 8, 13, 18, 46, 1, 439838)
- assert res.attrs['end_time'] == datetime.datetime(2012, 8, 13, 18, 57, 47, 746296)
+ assert res.attrs['start_time'] == datetime(2012, 8, 13, 18, 46, 1, 439838)
+ assert res.attrs['end_time'] == datetime(2012, 8, 13, 18, 57, 47, 746296)
assert res.attrs['area'] == area
assert res.attrs['ancillary_variables'] == []
data = res.values
- if abs(np.mean(data) - 38.734365117099145) >= 1e-10:
- raise AssertionError('{} is not within {} of {}'.format(np.mean(data), 1e-10, 38.734365117099145))
- assert data.shape == (5, 10)
+ assert abs(np.mean(data) - 52.09372623964498) < 1e-6
+ assert data.shape == (3, 5)
unique = np.unique(data)
- np.testing.assert_allclose(unique, [24.641586, 50.431692, 69.315375])
+ np.testing.assert_allclose(unique, [25.43670075, 52.93221561, 77.91226236])
def test_reflectance_corrector_bad_prereqs(self):
"""Test ReflectanceCorrector modifier with wrong number of inputs."""
@@ -342,23 +341,53 @@ def test_reflectance_corrector_bad_prereqs(self):
pytest.raises(ValueError, ref_cor, [1, 2, 3, 4], [])
pytest.raises(ValueError, ref_cor, [], [1, 2, 3, 4])
- def _start_dem_mock(self, tmpdir, url):
- if not url:
- return
- rmock_obj = mock.patch('satpy.modifiers._crefl.retrieve')
- rmock = rmock_obj.start()
- dem_fn = str(tmpdir.join(url))
- rmock.return_value = dem_fn
- from netCDF4 import Dataset
-
- nc = Dataset(dem_fn, 'w')
- nc.createDimension('y', 10)
- nc.createDimension('x', 10)
- dem_var = nc.createVariable('averaged elevation', np.float32,
- ('y', 'x'))
- dem_var[:] = 0
- return rmock_obj
-
- def _stop_dem_mock(self, rmock_obj):
- if rmock_obj:
- rmock_obj.stop()
+ @pytest.mark.parametrize(
+ 'url,dem_mock_cm,dem_sds',
+ [
+ (None, mock_cmgdem, "average elevation"),
+ ("CMGDEM.hdf", mock_cmgdem, "averaged elevation"),
+ ("tbase.hdf", mock_tbase, "Elevation"),
+ ])
+ def test_reflectance_corrector_different_chunks(self, tmpdir, url, dem_mock_cm, dem_sds):
+ """Test that the modifier works with different chunk sizes for inputs.
+
+        The modifier uses dask's "map_blocks". If the input chunks aren't
+        the same, an error is raised.
+
+ """
+ from satpy.modifiers._crefl import ReflectanceCorrector
+ from satpy.tests.utils import make_dsq
+
+ ref_cor = ReflectanceCorrector(
+ optional_prerequisites=[
+ make_dsq(name='satellite_azimuth_angle'),
+ make_dsq(name='satellite_zenith_angle'),
+ make_dsq(name='solar_azimuth_angle'),
+ make_dsq(name='solar_zenith_angle')
+ ],
+ name='I01',
+ prerequisites=[],
+ wavelength=(0.6, 0.64, 0.68),
+ resolution=371,
+ calibration='reflectance',
+ modifiers=('sunz_corrected_iband', 'rayleigh_corrected_crefl_iband'),
+ sensor='viirs',
+ url=url,
+ dem_sds=dem_sds,
+ )
+
+ area, data = self.data_area_ref_corrector()
+ c01 = _make_viirs_xarray(data, area, 'I01', 'toa_bidirectional_reflectance',
+ wavelength=(0.6, 0.64, 0.68), units='%',
+ calibration='reflectance')
+ c02 = _make_viirs_xarray(data, area, 'satellite_azimuth_angle', 'sensor_azimuth_angle')
+ c02.data = c02.data.rechunk((1, -1))
+ c03 = _make_viirs_xarray(data, area, 'satellite_zenith_angle', 'sensor_zenith_angle')
+ c04 = _make_viirs_xarray(data, area, 'solar_azimuth_angle', 'solar_azimuth_angle')
+ c05 = _make_viirs_xarray(data, area, 'solar_zenith_angle', 'solar_zenith_angle')
+
+ with dem_mock_cm(tmpdir, url):
+ res = ref_cor([c01], [c02, c03, c04, c05])
+
+ # make sure it can actually compute
+ res.compute()
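+        # The deliberately mismatched chunks above (c02 rechunked to rows of
+        # one) would break a naive dask map_blocks over all inputs, so a
+        # successful compute shows the modifier aligns input chunks first.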
diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py
new file mode 100644
index 0000000000..70d51b49e9
--- /dev/null
+++ b/satpy/tests/modifier_tests/test_parallax.py
@@ -0,0 +1,798 @@
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+"""Tests related to parallax correction."""
+
+import datetime
+import logging
+import math
+import os
+import unittest.mock
+
+import dask.array as da
+import dask.config
+import numpy as np
+import pyorbital.tlefile
+import pyresample.kd_tree
+import pytest
+import xarray as xr
+from pyproj import Geod
+from pyresample import create_area_def
+
+import satpy.resample
+
+from ...writers import get_enhanced_image
+
+
+@pytest.fixture
+def fake_tle():
+ """Produce fake Two Line Element (TLE) object from pyorbital."""
+ return pyorbital.tlefile.Tle(
+ "Meteosat-42",
+ line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995",
+ line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817")
+
+
+def _get_fake_areas(center, sizes, resolution, code=4326):
+ """Get multiple square areas with the same center.
+
+    Returns multiple square areas centered at the same location.
+
+ Args:
+        center (Tuple[float, float]): Center of all areas.
+        sizes (List[int]): Sizes of the areas.
+        resolution (float): Resolution of the fake areas.
+        code (int): EPSG code of the projection (defaults to 4326).
+
+ Returns:
+ List of areas.
+ """
+ return [create_area_def(
+ "fribullus_xax",
+ code,
+ units="degrees",
+ resolution=resolution,
+ center=center,
+ shape=(size, size))
+ for size in sizes]
+
+
+def _get_attrs(lat, lon, height=35_000):
+ """Get attributes for datasets in fake scene."""
+ return {
+ "orbital_parameters": {
+ "satellite_actual_altitude": height, # in km above surface
+ "satellite_actual_longitude": lon,
+ "satellite_actual_latitude": lat},
+ "units": "m" # does not apply to orbital parameters, I think!
+ }
+
+
+class TestForwardParallax:
+ """Test the forward parallax function with various inputs."""
+
+ def test_get_parallax_corrected_lonlats_ssp(self):
+ """Test that at SSP, parallax correction does nothing."""
+ from ...modifiers.parallax import get_parallax_corrected_lonlats
+ sat_lat = sat_lon = lon = lat = 0.
+ height = 5000. # m
+ sat_alt = 30_000_000. # m
+ corr_lon, corr_lat = get_parallax_corrected_lonlats(
+ sat_lon, sat_lat, sat_alt, lon, lat, height)
+ assert corr_lon == corr_lat == 0
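+        # At the sub-satellite point the satellite looks straight down, so
+        # the cloud sits exactly between the satellite and the ground point
+        # and the apparent position needs no correction.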
+
+ def test_get_parallax_corrected_lonlats_clearsky(self):
+ """Test parallax correction for clearsky case (returns NaN)."""
+ from ...modifiers.parallax import get_parallax_corrected_lonlats
+ sat_lat = sat_lon = 0
+ lat = np.linspace(-20, 20, 25).reshape(5, 5)
+ lon = np.linspace(-20, 20, 25).reshape(5, 5).T
+ height = np.full((5, 5), np.nan) # no CTH --> clearsky
+ sat_alt = 35_000_000. # m above surface
+ (corr_lon, corr_lat) = get_parallax_corrected_lonlats(
+ sat_lon, sat_lat, sat_alt, lon, lat, height)
+ # clearsky becomes NaN
+ assert np.isnan(corr_lon).all()
+ assert np.isnan(corr_lat).all()
+
+ @pytest.mark.parametrize("lat,lon", [(0, 0), (0, 40), (0, 179.9)])
+ @pytest.mark.parametrize("resolution", [0.01, 0.5, 10])
+ def test_get_parallax_corrected_lonlats_cloudy_ssp(self, lat, lon, resolution):
+ """Test parallax correction for fully cloudy scene at SSP."""
+ from ...modifiers.parallax import get_parallax_corrected_lonlats
+
+ N = 5
+ lats = np.linspace(lat-N*resolution, lat+N*resolution, 25).reshape(N, N)
+ lons = np.linspace(lon-N*resolution, lon+N*resolution, 25).reshape(N, N).T
+ height = np.full((N, N), 10_000) # constant high clouds at 10 km
+ sat_alt = 35_000_000. # satellite at 35 Mm
+ (corr_lon, corr_lat) = get_parallax_corrected_lonlats(
+ lon, lat, sat_alt, lons, lats, height)
+ # confirm movements behave as expected
+ geod = Geod(ellps="sphere")
+        # need to use np.tile here as geod.inv doesn't seem to broadcast
+        # (not even when lon/lat are passed as arrays of size (1, 1))
+ corr_dist = geod.inv(np.tile(lon, [N, N]), np.tile(lat, [N, N]), corr_lon, corr_lat)[2]
+ corr_delta = geod.inv(corr_lon, corr_lat, lons, lats)[2]
+ uncorr_dist = geod.inv(np.tile(lon, [N, N]), np.tile(lat, [N, N]), lons, lats)[2]
+ # should be equal at SSP and nowhere else
+ np.testing.assert_allclose(corr_delta[2, 2], 0, atol=1e-9)
+ assert np.isclose(corr_delta, 0, atol=1e-9).sum() == 1
+ # should always get closer to SSP
+ assert (uncorr_dist - corr_dist >= -1e-8).all()
+ # should be larger the further we get from SSP
+ assert (np.diff(corr_delta[N//2, :N//2+1]) < 0).all()
+ assert (np.diff(corr_delta[N//2, N//2:]) > 0).all()
+ assert (np.diff(corr_delta[N//2:, N//2]) > 0).all()
+ assert (np.diff(corr_delta[:N//2+1, N//2]) < 0).all()
+ assert (np.diff(np.diag(corr_delta)[:N//2+1]) < 0).all()
+ assert (np.diff(np.diag(corr_delta)[N//2:]) > 0).all()
+
+ def test_get_parallax_corrected_lonlats_cloudy_slant(self):
+ """Test parallax correction for fully cloudy scene (not SSP)."""
+ from ...modifiers.parallax import get_parallax_corrected_lonlats
+ sat_lat = sat_lon = 0
+ lat = np.linspace(-20, 20, 25).reshape(5, 5)
+ lon = np.linspace(-20, 20, 25).reshape(5, 5).T
+ height = np.full((5, 5), 10_000) # constant high clouds at 10 km
+ sat_alt = 35_000_000. # satellite at 35 Mm
+ (corr_lon, corr_lat) = get_parallax_corrected_lonlats(
+ sat_lon, sat_lat, sat_alt, lon, lat, height)
+ # reference value from Simon Proud
+ np.testing.assert_allclose(
+ corr_lat[4, 4], 19.955, rtol=5e-4)
+ np.testing.assert_allclose(
+ corr_lon[4, 4], 19.960, rtol=5e-4)
+
+ def test_get_parallax_corrected_lonlats_mixed(self):
+ """Test parallax correction for mixed cloudy case."""
+ from ...modifiers.parallax import get_parallax_corrected_lonlats
+
+ sat_lon = sat_lat = 0
+ sat_alt = 35_785_831.0 # m
+ lon = da.array([[-20, -10, 0, 10, 20]]*5)
+ lat = da.array([[-20, -10, 0, 10, 20]]*5).T
+ alt = da.array([
+ [np.nan, np.nan, 5000., 6000., np.nan],
+ [np.nan, 6000., 7000., 7000., 7000.],
+ [np.nan, 7000., 8000., 9000., np.nan],
+ [np.nan, 7000., 7000., 7000., np.nan],
+ [np.nan, 4000., 3000., np.nan, np.nan]])
+ (corrected_lon, corrected_lat) = get_parallax_corrected_lonlats(
+ sat_lon, sat_lat, sat_alt, lon, lat, alt)
+ assert corrected_lon.shape == lon.shape
+ assert corrected_lat.shape == lat.shape
+ # lon/lat should be nan for clear-sky pixels
+ assert np.isnan(corrected_lon[np.isnan(alt)]).all()
+ assert np.isnan(corrected_lat[np.isnan(alt)]).all()
+ # otherwise no nans
+ assert np.isfinite(corrected_lon[~np.isnan(alt)]).all()
+ assert np.isfinite(corrected_lat[~np.isnan(alt)]).all()
+
+ def test_get_parallax_corrected_lonlats_horizon(self):
+ """Test that exception is raised if satellites exactly at the horizon.
+
+ Test the rather unlikely case of a satellite elevation of exactly 0
+ """
+ from ...modifiers.parallax import get_parallax_corrected_lonlats
+ sat_lat = sat_lon = lon = lat = 0.
+ height = 5000.
+ sat_alt = 30_000_000.
+ with unittest.mock.patch("satpy.modifiers.parallax.get_observer_look") as smpg:
+ smpg.return_value = (0, 0)
+ with pytest.raises(NotImplementedError):
+ get_parallax_corrected_lonlats(sat_lon, sat_lat, sat_alt, lon, lat, height)
+
+ def test_get_surface_parallax_displacement(self):
+ """Test surface parallax displacement."""
+ from ...modifiers.parallax import get_surface_parallax_displacement
+
+ val = get_surface_parallax_displacement(
+ 0, 0, 36_000_000, 0, 10, 10_000)
+ np.testing.assert_allclose(val, 2141.2404451757875)
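+        # Order-of-magnitude check: the displacement is roughly
+        # cloud_height * tan(local viewing zenith angle), so a 10 km cloud
+        # seen ~10 degrees from the sub-satellite point shifts by ~2 km.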
+
+
+class TestParallaxCorrectionClass:
+ """Test that the ParallaxCorrection class is behaving sensibly."""
+
+ @pytest.mark.parametrize("center", [(0, 0), (80, -10), (-180, 5)])
+ @pytest.mark.parametrize("sizes", [[5, 9]])
+ @pytest.mark.parametrize("resolution", [0.05, 1, 10])
+ def test_init_parallaxcorrection(self, center, sizes, resolution):
+ """Test that ParallaxCorrection class can be instantiated."""
+ from ...modifiers.parallax import ParallaxCorrection
+ fake_area = _get_fake_areas(center, sizes, resolution)[0]
+ pc = ParallaxCorrection(fake_area)
+ assert pc.base_area == fake_area
+
+ @pytest.mark.parametrize("sat_pos,ar_pos",
+ [((0, 0), (0, 0)), ((0, 0), (40, 0))])
+ @pytest.mark.parametrize("resolution", [0.01, 0.5, 10])
+ def test_correct_area_clearsky(self, sat_pos, ar_pos, resolution, caplog):
+ """Test that ParallaxCorrection doesn't change clearsky geolocation."""
+ from ...modifiers.parallax import ParallaxCorrection
+ from ..utils import make_fake_scene
+ (sat_lat, sat_lon) = sat_pos
+ (ar_lat, ar_lon) = ar_pos
+ small = 5
+ large = 9
+ (fake_area_small, fake_area_large) = _get_fake_areas(
+ (ar_lon, ar_lat), [small, large], resolution)
+ corrector = ParallaxCorrection(fake_area_small)
+
+ sc = make_fake_scene(
+ {"CTH_clear": np.full((large, large), np.nan)},
+ daskify=False,
+ area=fake_area_large,
+ common_attrs=_get_attrs(sat_lat, sat_lon, 35_000))
+
+ with caplog.at_level(logging.DEBUG):
+ new_area = corrector(sc["CTH_clear"])
+ assert "Calculating parallax correction using heights from CTH_clear" in caplog.text
+ np.testing.assert_allclose(
+ new_area.get_lonlats(),
+ fake_area_small.get_lonlats())
+
+ @pytest.mark.parametrize("lat,lon",
+ [(0, 0), (0, 40), (0, 180),
+                              (90, 0)])  # relevant for Arktika (Арктика) satellites
+ @pytest.mark.parametrize("resolution", [0.01, 0.5, 10])
+ def test_correct_area_ssp(self, lat, lon, resolution):
+ """Test that ParallaxCorrection doesn't touch SSP."""
+ from ...modifiers.parallax import ParallaxCorrection
+ from ..utils import make_fake_scene
+ codes = {
+ (0, 0): 4326,
+ (0, 40): 4326,
+ (0, 180): 3575,
+ (90, 0): 3575}
+ small = 5
+ large = 9
+ (fake_area_small, fake_area_large) = _get_fake_areas(
+ (lon, lat), [small, large], resolution,
+ code=codes[(lat, lon)])
+ corrector = ParallaxCorrection(fake_area_small)
+
+ sc = make_fake_scene(
+ {"CTH_constant": np.full((large, large), 10000)},
+ daskify=False,
+ area=fake_area_large,
+ common_attrs=_get_attrs(lat, lon, 35_000))
+ new_area = corrector(sc["CTH_constant"])
+ assert new_area.shape == fake_area_small.shape
+ old_lonlats = fake_area_small.get_lonlats()
+ new_lonlats = new_area.get_lonlats()
+ if lat != 90: # don't check SSP longitude if lat=90
+ np.testing.assert_allclose(
+ old_lonlats[0][2, 2],
+ new_lonlats[0][2, 2],
+ atol=1e-9)
+ np.testing.assert_allclose(
+ old_lonlats[0][2, 2],
+ lon,
+ atol=1e-9)
+ np.testing.assert_allclose(
+ old_lonlats[1][2, 2],
+ new_lonlats[1][2, 2],
+ atol=1e-9)
+ np.testing.assert_allclose(
+ old_lonlats[1][2, 2],
+ lat,
+ atol=1e-9)
+
+ @pytest.mark.parametrize("daskify", [False, True])
+ def test_correct_area_partlycloudy(self, daskify):
+ """Test ParallaxCorrection for partly cloudy situation."""
+ from ...modifiers.parallax import ParallaxCorrection
+ from ..utils import make_fake_scene
+ small = 5
+ large = 9
+ (fake_area_small, fake_area_large) = _get_fake_areas(
+ (0, 50), [small, large], 0.1)
+ (fake_area_lons, fake_area_lats) = fake_area_small.get_lonlats()
+ corrector = ParallaxCorrection(fake_area_small)
+
+ sc = make_fake_scene(
+ {"CTH": np.array([
+ [np.nan, np.nan, 5000., 6000., 7000., 6000., 5000., np.nan, np.nan],
+ [np.nan, 6000., 7000., 7000., 7000., np.nan, np.nan, np.nan, np.nan],
+ [np.nan, 7000., 8000., 9000., np.nan, np.nan, np.nan, np.nan, np.nan],
+ [np.nan, 7000., 7000., 7000., np.nan, np.nan, np.nan, np.nan, np.nan],
+ [np.nan, 4000., 3000., np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
+ [np.nan, np.nan, 5000., 8000., 8000., 8000., 6000., np.nan, np.nan],
+ [np.nan, 9000., 9000., 9000., 9000., 9000., 9000., 9000., np.nan],
+ [np.nan, 9000., 9000., 9000., 9000., 9000., 9000., 9000., np.nan],
+ [np.nan, 9000., 9000., 9000., 9000., 9000., 9000., 9000., np.nan],
+ ])},
+ daskify=daskify,
+ area=fake_area_large,
+ common_attrs=_get_attrs(0, 0, 40_000))
+ new_area = corrector(sc["CTH"])
+ assert new_area.shape == fake_area_small.shape
+ (new_lons, new_lats) = new_area.get_lonlats()
+ assert fake_area_lons[3, 4] != new_lons[3, 4]
+
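+        # The reference arrays below are regression values from a reference
+        # run; NaN marks output pixels for which the corrector found no
+        # valid source coordinate.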
+ np.testing.assert_allclose(
+ new_lons,
+ np.array([
+ [np.nan, np.nan, 0.0, 0.1, 0.2],
+ [-0.20078652, -0.10044222, 0.0, 0.1, 0.2],
+ [-0.20068529, -0.10034264, 0.0, 0.1, 0.2],
+ [np.nan, np.nan, np.nan, np.nan, np.nan],
+ [-0.20048537, -0.10038778, 0., 0.10038778, 0.20058219]]),
+ rtol=1e-5)
+ np.testing.assert_allclose(
+ new_lats,
+ np.array([
+ [np.nan, np.nan, 50.2, 50.2, 50.2],
+ [50.2110675, 50.22493181, 50.1, 50.1, 50.1],
+ [50.09680357, 50.09680346, 50.0, 50.0, 50.0],
+ [np.nan, np.nan, np.nan, np.nan, np.nan],
+ [49.86860622, 49.9097198, 49.90971976, 49.9097198, 49.88231496]]),
+ rtol=1e-6)
+
+ @pytest.mark.parametrize("res1,res2", [(0.08, 0.3), (0.3, 0.08)])
+ def test_correct_area_clearsky_different_resolutions(self, res1, res2):
+ """Test clearsky correction when areas have different resolutions."""
+ from ...modifiers.parallax import ParallaxCorrection
+ from ..utils import make_fake_scene
+
+ # areas with different resolutions, but same coverage
+
+ area1 = create_area_def(
+ "fribullus_xax",
+ 4326,
+ units="degrees",
+ resolution=res1,
+ area_extent=[-1, -1, 1, 1])
+
+ area2 = create_area_def(
+ "fribullus_xax",
+ 4326,
+ units="degrees",
+ resolution=res2,
+ area_extent=[-1, -1, 1, 1])
+
+ with pytest.warns(None) as record:
+ sc = make_fake_scene(
+ {"CTH_clear": np.full(area1.shape, np.nan)},
+ daskify=False,
+ area=area1,
+ common_attrs=_get_attrs(0, 0, 35_000))
+ assert len(record) == 0
+
+ corrector = ParallaxCorrection(area2)
+ new_area = corrector(sc["CTH_clear"])
+ np.testing.assert_allclose(
+ new_area.get_lonlats(),
+ area2.get_lonlats())
+
+ @pytest.mark.xfail(reason="awaiting pyresample fixes")
+    def test_correct_area_cloudy_no_overlap(self):
+ """Test cloudy correction when areas have no overlap."""
+ from ...modifiers.parallax import MissingHeightError, ParallaxCorrection
+ from ..utils import make_fake_scene
+ areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1)
+ areas_shift = _get_fake_areas((90, 20), [5, 9], 0.1)
+ fake_area_small = areas_00[0]
+ fake_area_large = areas_shift[1]
+
+ sc = make_fake_scene(
+ {"CTH_constant": np.full((9, 9), 10000)},
+ daskify=False,
+ area=fake_area_large,
+ common_attrs=_get_attrs(0, 0, 35_000))
+
+ corrector = ParallaxCorrection(fake_area_small)
+ with pytest.raises(MissingHeightError):
+ corrector(sc["CTH_constant"])
+
+ @pytest.mark.xfail(reason="awaiting pyresample fixes")
+    def test_correct_area_cloudy_partly_shifted(self):
+ """Test cloudy correction when areas overlap only partly."""
+ from ...modifiers.parallax import IncompleteHeightWarning, ParallaxCorrection
+ from ..utils import make_fake_scene
+ areas_00 = _get_fake_areas((0, 40), [5, 9], 0.1)
+ areas_shift = _get_fake_areas((0.5, 40), [5, 9], 0.1)
+ fake_area_small = areas_00[0]
+ fake_area_large = areas_shift[1]
+
+ sc = make_fake_scene(
+ {"CTH_constant": np.full((9, 9), 10000)},
+ daskify=False,
+ area=fake_area_large,
+ common_attrs=_get_attrs(0, 0, 35_000))
+
+ corrector = ParallaxCorrection(fake_area_small)
+
+ with pytest.warns(IncompleteHeightWarning):
+ new_area = corrector(sc["CTH_constant"])
+ assert new_area.shape == fake_area_small.shape
+
+    def test_correct_area_cloudy_same_area(self):
+ """Test cloudy correction when areas are the same."""
+ from ...modifiers.parallax import ParallaxCorrection
+ from ..utils import make_fake_scene
+ area = _get_fake_areas((0, 0), [9], 0.1)[0]
+
+ sc = make_fake_scene(
+ {"CTH_constant": np.full((9, 9), 10000)},
+ daskify=False,
+ area=area,
+ common_attrs=_get_attrs(0, 0, 35_000))
+
+ corrector = ParallaxCorrection(area)
+ corrector(sc["CTH_constant"])
+
+ def test_correct_area_no_orbital_parameters(self, caplog, fake_tle):
+ """Test ParallaxCorrection when CTH has no orbital parameters.
+
+ Some CTH products, such as NWCSAF-GEO, do not include information
+ on satellite location directly. Rather, they include platform name,
+ sensor, start time, and end time, that we have to use instead.
+ """
+ from ...modifiers.parallax import ParallaxCorrection
+ from ..utils import make_fake_scene
+ small = 5
+ large = 9
+ (fake_area_small, fake_area_large) = _get_fake_areas(
+ (0, 0), [small, large], 0.05)
+ corrector = ParallaxCorrection(fake_area_small)
+
+ sc = make_fake_scene(
+ {"CTH_clear": np.full((large, large), np.nan)},
+ daskify=False,
+ area=fake_area_large,
+ common_attrs={
+ "platform_name": "Meteosat-42",
+ "sensor": "irives",
+ "start_time": datetime.datetime(3021, 11, 30, 12, 24, 17),
+ "end_time": datetime.datetime(3021, 11, 30, 12, 27, 22)})
+ with unittest.mock.patch("pyorbital.tlefile.read") as plr:
+ plr.return_value = fake_tle
+ with caplog.at_level(logging.WARNING):
+ new_area = corrector(sc["CTH_clear"])
+ assert "Orbital parameters missing from metadata." in caplog.text
+ np.testing.assert_allclose(
+ new_area.get_lonlats(),
+ fake_area_small.get_lonlats())
+
+
+class TestParallaxCorrectionModifier:
+ """Test that the parallax correction modifier works correctly."""
+
+ def test_parallax_modifier_interface(self):
+ """Test the modifier interface."""
+ from ...modifiers.parallax import ParallaxCorrectionModifier
+ (area_small, area_large) = _get_fake_areas((0, 0), [5, 9], 0.1)
+ fake_bt = xr.DataArray(
+ np.linspace(220, 230, 25).reshape(5, 5),
+ dims=("y", "x"),
+ attrs={"area": area_small, **_get_attrs(0, 0, 35_000)})
+ cth_clear = xr.DataArray(
+ np.full((9, 9), np.nan),
+ dims=("y", "x"),
+ attrs={"area": area_large, **_get_attrs(0, 0, 35_000)})
+ modif = ParallaxCorrectionModifier(
+ name="parallax_corrected_dataset",
+ prerequisites=[fake_bt, cth_clear],
+ optional_prerequisites=[],
+ cth_radius_of_influence=48_000,
+ dataset_radius_of_influence=49_000)
+ res = modif([fake_bt, cth_clear], optional_datasets=[])
+ np.testing.assert_allclose(res, fake_bt)
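+        # resample_dataset is invoked twice inside the modifier: first to
+        # resample the CTH field, then to resample the dataset itself, so
+        # the two radius_of_influence values can be verified in call order.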
+ with unittest.mock.patch("satpy.modifiers.parallax.resample_dataset") as smp:
+ smp.side_effect = satpy.resample.resample_dataset
+ modif([fake_bt, cth_clear], optional_datasets=[])
+ assert smp.call_args_list[0].kwargs["radius_of_influence"] == 48_000
+ assert smp.call_args_list[1].kwargs["radius_of_influence"] == 49_000
+
+ def test_parallax_modifier_interface_with_cloud(self):
+ """Test the modifier interface with a cloud.
+
+ Test corresponds to a real bug encountered when using CTH data
+ from NWCSAF-GEO, which created strange speckles in Africa (see
+ https://github.com/pytroll/satpy/pull/1904#issuecomment-1011161623
+ for an example). Create fake CTH corresponding to NWCSAF-GEO area and
+ BT corresponding to full disk SEVIRI, and test that no strange speckles
+ occur.
+ """
+ from ...modifiers.parallax import ParallaxCorrectionModifier
+
+ w_cth = 25
+ h_cth = 15
+ proj_dict = {'a': '6378137', 'h': '35785863', 'proj': 'geos', 'units': 'm'}
+ fake_area_cth = pyresample.create_area_def(
+ area_id="test-area",
+ projection=proj_dict,
+ area_extent=(-2296808.75, 2785874.75, 2293808.25, 5570249.0),
+ shape=(h_cth, w_cth))
+
+ sz_bt = 20
+ fake_area_bt = pyresample.create_area_def(
+ "test-area-2",
+ projection=proj_dict,
+ area_extent=(-5567248.0742, -5513240.8172, 5513240.8172, 5567248.0742),
+ shape=(sz_bt, sz_bt))
+
+ (lons_cth, lats_cth) = fake_area_cth.get_lonlats()
+ fake_cth_data = np.where(
+ np.isfinite(lons_cth) & np.isfinite(lats_cth),
+ 15000,
+ np.nan)
+
+ (lons_bt, lats_bt) = fake_area_bt.get_lonlats()
+ fake_bt_data = np.where(
+ np.isfinite(lons_bt) & np.isfinite(lats_bt),
+ np.linspace(200, 300, lons_bt.size).reshape(lons_bt.shape),
+ np.nan)
+
+ attrs = _get_attrs(0, 0)
+
+ fake_bt = xr.DataArray(
+ fake_bt_data,
+ dims=("y", "x"),
+ attrs={**attrs, "area": fake_area_bt})
+ fake_cth = xr.DataArray(
+ fake_cth_data,
+ dims=("y", "x"),
+ attrs={**attrs, "area": fake_area_cth})
+
+ modif = ParallaxCorrectionModifier(
+ name="parallax_corrected_dataset",
+ prerequisites=[fake_bt, fake_cth],
+ optional_prerequisites=[],
+ search_radius=25_000)
+
+ res = modif([fake_bt, fake_cth], optional_datasets=[])
+
+ # with a constant cloud, a monotonically increasing BT should still
+ # do so after parallax correction
+ assert not (res.diff("x") < 0).any()
+
+ @pytest.fixture
+ def test_area(self, request):
+ """Produce test area for parallax correction unit tests.
+
+ Produce test area for the modifier-interface parallax correction unit
+ tests.
+ """
+ extents = {
+ "foroyar": [-861785.8867075047, 6820719.391005835, -686309.8124887547, 6954386.383193335],
+ "ouagadougou": [-232482.90622750926, 1328206.360136668,
+ -114074.70310250926, 1422810.852324168],
+ }
+ where = request.param
+ return pyresample.create_area_def(where, 4087, area_extent=extents[where], resolution=500)
+
+ def _get_fake_cloud_datasets(self, test_area, cth, use_dask):
+ """Return datasets for BT and CTH for fake cloud."""
+ w_cloud = 20
+ h_cloud = 5
+
+ # location of cloud in uncorrected data
+ lat_min_i = 155
+ lat_max_i = lat_min_i + h_cloud
+ lon_min_i = 140
+ lon_max_i = lon_min_i + w_cloud
+
+ fake_bt_data = np.linspace(
+ 270, 330, math.prod(test_area.shape), dtype="f8").reshape(
+ test_area.shape).round(2)
+ fake_cth_data = np.full(test_area.shape, np.nan, dtype="f8")
+ fake_bt_data[lat_min_i:lat_max_i, lon_min_i:lon_max_i] = np.linspace(
+ 180, 220, w_cloud*h_cloud).reshape(h_cloud, w_cloud).round(2)
+ fake_cth_data[lat_min_i:lat_max_i, lon_min_i:lon_max_i] = cth
+
+ if use_dask:
+ fake_bt_data = da.array(fake_bt_data)
+ fake_cth_data = da.array(fake_cth_data)
+
+ attrs = _get_attrs(0, 0)
+
+ fake_bt = xr.DataArray(
+ fake_bt_data,
+ dims=("y", "x"),
+ attrs={**attrs, "area": test_area})
+
+ fake_cth = xr.DataArray(
+ fake_cth_data,
+ dims=("y", "x"),
+ attrs={**attrs, "area": test_area})
+
+ cma = np.zeros(shape=fake_bt.shape, dtype="?")
+ cma[lat_min_i:lat_max_i, lon_min_i:lon_max_i] = True
+
+ return (fake_bt, fake_cth, cma)
+
+ @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"])
+ def test_modifier_interface_fog_no_shift(self, test_area):
+ """Test that fog isn't masked or shifted."""
+ from ...modifiers.parallax import ParallaxCorrectionModifier
+
+ (fake_bt, fake_cth, _) = self._get_fake_cloud_datasets(test_area, 50, use_dask=False)
+
+ modif = ParallaxCorrectionModifier(
+ name="parallax_corrected_dataset",
+ prerequisites=[fake_bt, fake_cth],
+ optional_prerequisites=[],
+ debug_mode=True)
+
+ res = modif([fake_bt, fake_cth], optional_datasets=[])
+
+ assert np.isfinite(res).all()
+ np.testing.assert_allclose(res, fake_bt)
+
+ @pytest.mark.parametrize("cth", [7500, 15000])
+ @pytest.mark.parametrize("use_dask", [True, False])
+ @pytest.mark.parametrize("test_area", ["foroyar", "ouagadougou"], indirect=["test_area"])
+ def test_modifier_interface_cloud_moves_to_observer(self, cth, use_dask, test_area):
+ """Test that a cloud moves to the observer.
+
+ With the modifier interface, use a high resolution area and test that
+ pixels are moved in the direction of the observer and not away from it.
+ """
+ from ...modifiers.parallax import ParallaxCorrectionModifier
+
+ (fake_bt, fake_cth, cma) = self._get_fake_cloud_datasets(test_area, cth, use_dask=use_dask)
+
+ # location of cloud in corrected data
+ # this may no longer be rectangular!
+ dest_mask = np.zeros(shape=test_area.shape, dtype="?")
+ cloud_location = {
+ "foroyar": {
+ 7500: (197, 202, 152, 172),
+ 15000: (239, 244, 165, 184)},
+ "ouagadougou": {
+ 7500: (159, 164, 140, 160),
+ 15000: (163, 168, 141, 161)}}
+ (x_lo, x_hi, y_lo, y_hi) = cloud_location[test_area.name][cth]
+ dest_mask[x_lo:x_hi, y_lo:y_hi] = True
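+        # The bounds above are regression values; the shift grows with the
+        # cloud height and with the distance from the sub-satellite point,
+        # which is why the displacement over foroyar (far north) is much
+        # larger than over ouagadougou (near the equator).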
+
+ modif = ParallaxCorrectionModifier(
+ name="parallax_corrected_dataset",
+ prerequisites=[fake_bt, fake_cth],
+ optional_prerequisites=[],
+ debug_mode=True)
+
+ res = modif([fake_bt, fake_cth], optional_datasets=[])
+
+ assert fake_bt.attrs["area"] == test_area # should not be changed
+ assert res.attrs["area"] == fake_bt.attrs["area"]
+ # confirm old cloud area now fill value
+ # except where it overlaps with new cloud
+ assert np.isnan(res.data[cma & (~dest_mask)]).all()
+ # confirm rest of the area does not have fill values
+ assert np.isfinite(res.data[~cma]).all()
+ # confirm that rest of area pixel values did not change, except where
+ # cloud arrived or originated
+ delta = res - fake_bt
+ assert (delta.data[~(cma | dest_mask)] == 0).all()
+ # verify that cloud moved south. Pointwise comparison might not work because
+ # cloud may shrink.
+ assert ((res.attrs["area"].get_lonlats()[1][dest_mask]).mean() <
+ fake_bt.attrs["area"].get_lonlats()[1][cma].mean())
+ # verify that all pixels at the new cloud location are indeed cloudy
+ assert (res.data[dest_mask] < 250).all()
+
+
+_test_yaml_code = """
+sensor_name: visir
+
+modifiers:
+ parallax_corrected:
+ modifier: !!python/name:satpy.modifiers.parallax.ParallaxCorrectionModifier
+ prerequisites:
+ - name: "ctth_alti"
+
+composites:
+ parallax_corrected_VIS006:
+ compositor: !!python/name:satpy.composites.SingleBandCompositor
+ prerequisites:
+ - name: VIS006
+ modifiers: [parallax_corrected]
+"""
+
+
+class TestParallaxCorrectionSceneLoad:
+ """Test that scene load interface works as expected."""
+
+ @pytest.fixture
+ def yaml_code(self):
+ """Return YAML code for parallax_corrected_VIS006."""
+ return _test_yaml_code
+
+ @pytest.fixture
+ def conf_file(self, yaml_code, tmp_path):
+ """Produce a fake configuration file."""
+ conf_file = tmp_path / "test.yaml"
+ with conf_file.open(mode="wt", encoding="ascii") as fp:
+ fp.write(yaml_code)
+ return conf_file
+
+ @pytest.fixture
+ def fake_scene(self, yaml_code):
+ """Produce fake scene and prepare fake composite config."""
+ from satpy import Scene
+ from satpy.dataset.dataid import WavelengthRange
+ from satpy.tests.utils import make_dataid
+
+ area = _get_fake_areas((0, 0), [5], 1)[0]
+ sc = Scene()
+ sc["VIS006"] = xr.DataArray(
+ np.linspace(0, 99, 25).reshape(5, 5),
+ dims=("y", "x"),
+ attrs={
+ "_satpy_id": make_dataid(
+ name="VIS006",
+ wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit="µm"),
+ resolution=3000,
+ calibration="reflectance",
+ modifiers=()),
+ "modifiers": (),
+ "sensor": "seviri",
+ "area": area})
+ sc["ctth_alti"] = xr.DataArray(
+ np.linspace(0, 99, 25).reshape(5, 5),
+ dims=("y", "x"),
+ attrs={
+ "_satpy_id": make_dataid(
+ name="ctth_alti",
+ resolution=3000,
+ modifiers=()),
+ "modifiers": (),
+ "sensor": {"seviri"},
+ "platform_name": "Meteosat-11",
+ "start_time": datetime.datetime(2022, 4, 12, 9, 0),
+ "area": area})
+ return sc
+
+ def test_double_load(self, fake_scene, conf_file, fake_tle):
+ """Test that loading corrected and uncorrected works correctly.
+
+ When the modifier ``__call__`` method fails to call
+ ``self.apply_modifier_info(new, old)`` and the original and
+ parallax-corrected dataset are requested at the same time, the
+ DataArrays differ but the underlying dask arrays have object identity,
+ which in turn leads to both being parallax corrected. This unit test
+ confirms that there is no such object identity.
+ """
+ with unittest.mock.patch(
+ "satpy.composites.config_loader.config_search_paths") as sccc, \
+ unittest.mock.patch("pyorbital.tlefile.read") as plr:
+ sccc.return_value = [os.fspath(conf_file)]
+ plr.return_value = fake_tle
+ fake_scene.load(["parallax_corrected_VIS006", "VIS006"])
+ assert fake_scene["VIS006"] is not fake_scene["parallax_corrected_VIS006"]
+ assert fake_scene["VIS006"].data is not fake_scene["parallax_corrected_VIS006"].data
+
+ @pytest.mark.xfail(reason="awaiting pyresample fixes")
+ def test_no_compute(self, fake_scene, conf_file):
+ """Test that no computation occurs."""
+ from satpy.tests.utils import CustomScheduler
+ with unittest.mock.patch(
+ "satpy.composites.config_loader.config_search_paths") as sccc, \
+ dask.config.set(scheduler=CustomScheduler(max_computes=0)):
+ sccc.return_value = [os.fspath(conf_file)]
+ fake_scene.load(["parallax_corrected_VIS006"])
+
+ def test_enhanced_image(self, fake_scene, conf_file, fake_tle):
+ """Test that image enhancement is the same."""
+ with unittest.mock.patch(
+ "satpy.composites.config_loader.config_search_paths") as sccc, \
+ unittest.mock.patch("pyorbital.tlefile.read") as plr:
+ sccc.return_value = [os.fspath(conf_file)]
+ plr.return_value = fake_tle
+ fake_scene.load(["parallax_corrected_VIS006", "VIS006"])
+ im1 = get_enhanced_image(fake_scene["VIS006"])
+ im2 = get_enhanced_image(fake_scene["parallax_corrected_VIS006"])
+ assert im1.data.attrs["enhancement_history"] == im2.data.attrs["enhancement_history"]
diff --git a/satpy/tests/reader_tests/_modis_fixtures.py b/satpy/tests/reader_tests/_modis_fixtures.py
new file mode 100644
index 0000000000..34cb6a7aab
--- /dev/null
+++ b/satpy/tests/reader_tests/_modis_fixtures.py
@@ -0,0 +1,584 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""MODIS L1b and L2 test fixtures."""
+from __future__ import annotations
+
+from datetime import datetime, timedelta
+from typing import Optional
+
+import numpy as np
+import pytest
+from pyhdf.SD import SD, SDC
+
+# Level 1 Fixtures
+
+AVAILABLE_1KM_VIS_PRODUCT_NAMES = [str(x) for x in range(8, 13)]
+AVAILABLE_1KM_VIS_PRODUCT_NAMES += ['13lo', '13hi', '14lo', '14hi']
+AVAILABLE_1KM_VIS_PRODUCT_NAMES += [str(x) for x in range(15, 20)]
+AVAILABLE_1KM_IR_PRODUCT_NAMES = [str(x) for x in range(20, 37)]
+AVAILABLE_1KM_PRODUCT_NAMES = AVAILABLE_1KM_VIS_PRODUCT_NAMES + AVAILABLE_1KM_IR_PRODUCT_NAMES
+AVAILABLE_HKM_PRODUCT_NAMES = [str(x) for x in range(3, 8)]
+AVAILABLE_QKM_PRODUCT_NAMES = ['1', '2']
+SCAN_LEN_5KM = 6 # 3 scans of 5km data
+SCAN_WIDTH_5KM = 270
+SCALE_FACTOR = 0.5
+ADD_OFFSET = -0.5
+RES_TO_REPEAT_FACTOR = {
+ 250: 20,
+ 500: 10,
+ 1000: 5,
+ 5000: 1,
+}
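+# The repeat factors scale the 5 km base grid (6 rows x 270 columns) up to
+# the higher resolutions; together with the 4 extra cross-track columns
+# added in _shape_for_resolution, this mirrors the real MODIS grids, where
+# the 1 km swath is 5 * 270 + 4 = 1354 pixels wide.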
+
+
+def _shape_for_resolution(resolution: int) -> tuple[int, int]:
+ assert resolution in RES_TO_REPEAT_FACTOR
+ factor = RES_TO_REPEAT_FACTOR[resolution]
+ if factor == 1:
+ return SCAN_LEN_5KM, SCAN_WIDTH_5KM
+
+ factor_1km = RES_TO_REPEAT_FACTOR[1000]
+ shape_1km = (factor_1km * SCAN_LEN_5KM, factor_1km * SCAN_WIDTH_5KM + 4)
+ factor //= 5
+ return factor * shape_1km[0], factor * shape_1km[1]
+
+
+def _generate_lonlat_data(resolution: int) -> tuple[np.ndarray, np.ndarray]:
+ shape = _shape_for_resolution(resolution)
+ lat = np.repeat(np.linspace(35., 45., shape[0])[:, None], shape[1], 1)
+ lat *= np.linspace(0.9, 1.1, shape[1])
+ lon = np.repeat(np.linspace(-45., -35., shape[1])[None, :], shape[0], 0)
+ lon *= np.linspace(0.9, 1.1, shape[0])[:, None]
+ return lon.astype(np.float32), lat.astype(np.float32)
+
+
+def _generate_angle_data(resolution: int) -> np.ndarray:
+ shape = _shape_for_resolution(resolution)
+ data = np.repeat(abs(np.linspace(-65.2, 65.4, shape[1]))[None, :], shape[0], 0)
+ return (data * 100).astype(np.int16)
+
+
+def _generate_visible_data(resolution: int, num_bands: int, dtype=np.uint16) -> np.ndarray:
+ shape = _shape_for_resolution(resolution)
+ data = np.ones((num_bands, shape[0], shape[1]), dtype=dtype)
+
+ # add fill value to every band
+ data[:, -1, -1] = 65535
+
+ # add band 2 saturation and can't aggregate fill values
+ data[1, -1, -2] = 65533
+ data[1, -1, -3] = 65528
+ return data
+
+
+def _generate_visible_uncertainty_data(shape: tuple) -> np.ndarray:
+ uncertainty = np.zeros(shape, dtype=np.uint8)
+ uncertainty[:, -1, -1] = 15 # fill value
+ uncertainty[:, -1, -2] = 15 # saturated
+ uncertainty[:, -1, -3] = 15 # can't aggregate
+ return uncertainty
+
+
+def _get_lonlat_variable_info(resolution: int) -> dict:
+ lon_5km, lat_5km = _generate_lonlat_data(resolution)
+ return {
+ 'Latitude': {'data': lat_5km,
+ 'type': SDC.FLOAT32,
+ 'fill_value': -999,
+ 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}},
+ 'Longitude': {'data': lon_5km,
+ 'type': SDC.FLOAT32,
+ 'fill_value': -999,
+ 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}},
+ }
+
+
+def _get_angles_variable_info(resolution: int) -> dict:
+ angle_data = _generate_angle_data(resolution)
+ dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2
+ angle_info = {
+ 'data': angle_data,
+ 'type': SDC.INT16,
+ 'fill_value': -32767,
+ 'attrs': {
+ 'dim_labels': [
+ f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B',
+ '1KM_geo_dim:MODIS_SWATH_Type_L1B'],
+ 'scale_factor': 0.01,
+ 'add_offset': -0.01,
+ },
+ }
+ angles_info = {}
+ for var_name in ('SensorAzimuth', 'SensorZenith', 'SolarAzimuth', 'SolarZenith'):
+ angles_info[var_name] = angle_info
+ return angles_info
+
+
+def _get_visible_variable_info(var_name: str, resolution: int, bands: list[str]) -> dict:
+    num_bands = len(bands)
+    data = _generate_visible_data(resolution, num_bands)
+ uncertainty = _generate_visible_uncertainty_data(data.shape)
+ dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2
+ band_dim_name = f"Band_{resolution}_{num_bands}_RefSB:MODIS_SWATH_Type_L1B"
+ row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B'
+ col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B'
+ return {
+ var_name: {
+ 'data': data,
+ 'type': SDC.UINT16,
+ 'fill_value': 0,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [band_dim_name,
+ row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, 32767),
+ 'reflectance_scales': (2.0,) * num_bands,
+ 'reflectance_offsets': (-0.5,) * num_bands,
+ 'band_names': ",".join(bands),
+ },
+ },
+ var_name + '_Uncert_Indexes': {
+ 'data': uncertainty,
+ 'type': SDC.UINT8,
+ 'fill_value': 255,
+ 'attrs': {
+ 'dim_labels': [band_dim_name,
+ row_dim_name,
+ col_dim_name],
+ },
+ },
+ }
+
+
+def _get_emissive_variable_info(var_name: str, resolution: int, bands: list[str]) -> dict:
+    num_bands = len(bands)
+    data = _generate_visible_data(resolution, num_bands)
+ dim_factor = RES_TO_REPEAT_FACTOR[resolution] * 2
+ band_dim_name = f"Band_{resolution}_{num_bands}_Emissive:MODIS_SWATH_Type_L1B"
+ row_dim_name = f'{dim_factor}*nscans:MODIS_SWATH_Type_L1B'
+ col_dim_name = 'Max_EV_frames:MODIS_SWATH_Type_L1B'
+ return {
+ var_name: {
+ 'data': data,
+ 'type': SDC.UINT16,
+ 'fill_value': 0,
+ 'attrs': {
+ 'dim_labels': [band_dim_name,
+ row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, 32767),
+ 'band_names': ",".join(bands),
+ },
+ },
+ var_name + '_Uncert_Indexes': {
+ 'data': np.zeros(data.shape, dtype=np.uint8),
+ 'type': SDC.UINT8,
+ 'fill_value': 255,
+ 'attrs': {
+ 'dim_labels': [band_dim_name,
+ row_dim_name,
+ col_dim_name],
+ },
+ },
+ }
+
+
+def _get_l1b_geo_variable_info(filename: str,
+ geo_resolution: int,
+ include_angles: bool = True
+ ) -> dict:
+ variables_info = {}
+ variables_info.update(_get_lonlat_variable_info(geo_resolution))
+ if include_angles:
+ variables_info.update(_get_angles_variable_info(geo_resolution))
+ return variables_info
+
+
+def generate_nasa_l1b_filename(prefix: str) -> str:
+ """Generate a filename that follows NASA MODIS L1b convention."""
+ now = datetime.now()
+ return f'{prefix}_A{now:%y%j_%H%M%S}_{now:%Y%j%H%M%S}.hdf'
+
+
+def generate_imapp_filename(suffix: str) -> str:
+ """Generate a filename that follows IMAPP MODIS L1b convention."""
+ now = datetime.now()
+ return f't1.{now:%y%j.%H%M}.{suffix}.hdf'
+
+
+def create_hdfeos_test_file(filename: str,
+ variable_infos: dict,
+ geo_resolution: Optional[int] = None,
+ file_shortname: Optional[str] = None,
+ include_metadata: bool = True):
+ """Create a fake MODIS L1b HDF4 file with headers.
+
+ Args:
+ filename: Full path of filename to be created.
+ variable_infos: Dictionary mapping HDF4 variable names to dictionary
+ of variable information (see ``_add_variable_to_file``).
+ geo_resolution: Resolution of geolocation datasets to be stored in the
+ metadata strings stored in the global metadata attributes. Only
+ used if ``include_metadata`` is ``True`` (default).
+ file_shortname: Short name of the file to be stored in global metadata
+ attributes. Only used if ``include_metadata`` is ``True``
+ (default).
+ include_metadata: Include global metadata attributes (default: True).
+
+ """
+ h = SD(filename, SDC.WRITE | SDC.CREATE)
+
+ if include_metadata:
+ if geo_resolution is None or file_shortname is None:
+ raise ValueError("'geo_resolution' and 'file_shortname' are required when including metadata.")
+ setattr(h, 'CoreMetadata.0', _create_core_metadata(file_shortname)) # noqa
+ setattr(h, 'StructMetadata.0', _create_struct_metadata(geo_resolution)) # noqa
+ setattr(h, 'ArchiveMetadata.0', _create_header_metadata()) # noqa
+
+ for var_name, var_info in variable_infos.items():
+ _add_variable_to_file(h, var_name, var_info)
+
+ h.end()
+
+
+def _add_variable_to_file(h, var_name, var_info):
+ v = h.create(var_name, var_info['type'], var_info['data'].shape)
+ v[:] = var_info['data']
+    for dim_idx, dimension_name in enumerate(var_info['attrs']['dim_labels']):
+        v.dim(dim_idx).setname(dimension_name)
+ v.setfillvalue(var_info['fill_value'])
+ v.scale_factor = var_info['attrs'].get('scale_factor', SCALE_FACTOR)
+ v.add_offset = var_info['attrs'].get('add_offset', ADD_OFFSET)
+ for attr_key, attr_val in var_info['attrs'].items():
+ if attr_key == 'dim_labels':
+ continue
+ setattr(v, attr_key, attr_val)
+
+
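+# The helpers below fabricate the ODL-style text blobs that real HDF-EOS
+# files store in their CoreMetadata.0, StructMetadata.0 and
+# ArchiveMetadata.0 global attributes, with just enough structure (time
+# range, platform, short name, swath dimension map) for the readers under
+# test to parse.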
+def _create_core_metadata(file_shortname: str) -> str:
+ beginning_date = datetime.now()
+ ending_date = beginning_date + timedelta(minutes=5)
+ core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \
+ "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
+ "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n" \
+ "NUM_VAL = 1\nVALUE = \"{}\"\n" \
+ "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\n" \
+ "NUM_VAL = 1\nVALUE = \"{}\"\n" \
+ "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
+ "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME"
+ core_metadata_header = core_metadata_header.format(
+ beginning_date.strftime("%Y-%m-%d"),
+ beginning_date.strftime("%H:%M:%S.%f"),
+ ending_date.strftime("%Y-%m-%d"),
+ ending_date.strftime("%H:%M:%S.%f")
+ )
+ inst_metadata = "GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n" \
+ "OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\nCLASS = \"1\"\n\n" \
+ "OBJECT = ASSOCIATEDSENSORSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
+ "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDSENSORSHORTNAME\n\n" \
+ "OBJECT = ASSOCIATEDPLATFORMSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
+ "VALUE = \"Terra\"\nEND_OBJECT = ASSOCIATEDPLATFORMSHORTNAME\n\n" \
+ "OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\nCLASS = \"1\"\nNUM_VAL = 1\n" \
+ "VALUE = \"MODIS\"\nEND_OBJECT = ASSOCIATEDINSTRUMENTSHORTNAME\n\n" \
+ "END_OBJECT = ASSOCIATEDPLATFORMINSTRUMENTSENSORCONTAINER\n\n" \
+ "END_GROUP = ASSOCIATEDPLATFORMINSTRUMENTSENSOR\n\n"
+ collection_metadata = "GROUP = COLLECTIONDESCRIPTIONCLASS\n\nOBJECT = SHORTNAME\nNUM_VAL = 1\n" \
+ f"VALUE = \"{file_shortname}\"\nEND_OBJECT = SHORTNAME\n\n" \
+ "OBJECT = VERSIONID\nNUM_VAL = 1\nVALUE = 6\nEND_OBJECT = VERSIONID\n\n" \
+ "END_GROUP = COLLECTIONDESCRIPTIONCLASS\n\n"
+ core_metadata_header += "\n\n" + inst_metadata + collection_metadata
+ return core_metadata_header
+
+
+def _create_struct_metadata(geo_resolution: int) -> str:
+ geo_dim_factor = RES_TO_REPEAT_FACTOR[geo_resolution] * 2
+ struct_metadata_header = "GROUP=SwathStructure\n" \
+ "GROUP=SWATH_1\n" \
+ "GROUP=DimensionMap\n" \
+ "OBJECT=DimensionMap_2\n" \
+ f"GeoDimension=\"{geo_dim_factor}*nscans\"\n" \
+ "END_OBJECT=DimensionMap_2\n" \
+ "END_GROUP=DimensionMap\n" \
+ "END_GROUP=SWATH_1\n" \
+ "END_GROUP=SwathStructure\nEND"
+ return struct_metadata_header
+
+
+def _create_header_metadata() -> str:
+ archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND"
+ return archive_metadata_header
+
+
+@pytest.fixture(scope="session")
+def modis_l1b_nasa_mod021km_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD021KM file following standard NASA file scheme."""
+ filename = generate_nasa_l1b_filename("MOD021km")
+ full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True)
+ variable_infos.update(_get_visible_variable_info("EV_1KM_RefSB", 1000, AVAILABLE_1KM_VIS_PRODUCT_NAMES))
+ variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES))
+ variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES))
+ variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES))
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l1b_imapp_1000m_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD021KM file following IMAPP file scheme."""
+ filename = generate_imapp_filename("1000m")
+ full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True)
+ variable_infos.update(_get_visible_variable_info("EV_1KM_RefSB", 1000, AVAILABLE_1KM_VIS_PRODUCT_NAMES))
+ variable_infos.update(_get_visible_variable_info("EV_500_Aggr1km_RefSB", 1000, AVAILABLE_HKM_PRODUCT_NAMES))
+ variable_infos.update(_get_visible_variable_info("EV_250_Aggr1km_RefSB", 1000, AVAILABLE_QKM_PRODUCT_NAMES))
+ variable_infos.update(_get_emissive_variable_info("EV_1KM_Emissive", 1000, AVAILABLE_1KM_IR_PRODUCT_NAMES))
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD021KM")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l1b_nasa_mod02hkm_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD02HKM file following standard NASA file scheme."""
+ filename = generate_nasa_l1b_filename("MOD02Hkm")
+ full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False)
+ variable_infos.update(_get_visible_variable_info("EV_500_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES))
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02HKM")
+ return [full_path]
+
+
+@pytest.fixture
+def modis_l1b_nasa_mod02qkm_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD02QKM file following standard NASA file scheme."""
+ filename = generate_nasa_l1b_filename("MOD02Qkm")
+ full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=False)
+ variable_infos.update(_get_visible_variable_info("EV_250_RefSB", 250, AVAILABLE_QKM_PRODUCT_NAMES))
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD02QKM")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l1b_nasa_mod03_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD03 file following standard NASA file scheme."""
+ filename = generate_nasa_l1b_filename("MOD03")
+ full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True)
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l1b_imapp_geo_file(tmpdir_factory) -> list[str]:
+ """Create a single geo file following standard IMAPP file scheme."""
+ filename = generate_imapp_filename("geo")
+ full_path = str(tmpdir_factory.mktemp("modis_l1b").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 1000, include_angles=True)
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=1000, file_shortname="MOD03")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_mod03_file) -> list[str]:
+ """Create input files including the 1KM and MOD03 files."""
+ return modis_l1b_nasa_mod021km_file + modis_l1b_nasa_mod03_file
+
+
+# Level 2 Fixtures
+
+
+def _get_basic_variable_info(var_name: str, resolution: int) -> dict:
+ shape = _shape_for_resolution(resolution)
+ data = np.ones((shape[0], shape[1]), dtype=np.uint16)
+ row_dim_name = f'Cell_Along_Swath_{resolution}m:modl2'
+ col_dim_name = f'Cell_Across_Swath_{resolution}m:modl2'
+ return {
+ var_name: {
+ 'data': data,
+ 'type': SDC.UINT16,
+ 'fill_value': 0,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, 32767),
+ 'scale_factor': 2.0,
+ 'add_offset': -1.0,
+ },
+ },
+ }
+
+
+def _get_cloud_mask_variable_info(var_name: str, resolution: int) -> dict:
+ num_bytes = 6
+ shape = _shape_for_resolution(resolution)
+ data = np.zeros((num_bytes, shape[0], shape[1]), dtype=np.int8)
+ byte_dim_name = "Byte_Segment:mod35"
+ row_dim_name = 'Cell_Along_Swath_1km:mod35'
+ col_dim_name = 'Cell_Across_Swath_1km:mod35'
+ return {
+ var_name: {
+ 'data': data,
+ 'type': SDC.INT8,
+ 'fill_value': 0,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [byte_dim_name,
+ row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, -1),
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ },
+ },
+ 'Quality_Assurance': {
+ 'data': np.ones((shape[0], shape[1], 10), dtype=np.int8),
+ 'type': SDC.INT8,
+ 'fill_value': 0,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [row_dim_name,
+ col_dim_name,
+ 'Quality_Dimension:mod35'],
+ 'valid_range': (0, -1),
+ 'scale_factor': 2.,
+ 'add_offset': -0.5,
+ },
+ },
+ }
+
+
+def _get_mask_byte1_variable_info() -> dict:
+ shape = _shape_for_resolution(1000)
+ data = np.zeros((shape[0], shape[1]), dtype=np.uint16)
+ row_dim_name = 'Cell_Along_Swath_1km:mod35'
+ col_dim_name = 'Cell_Across_Swath_1km:mod35'
+ return {
+ "MODIS_Cloud_Mask": {
+ 'data': data,
+ 'type': SDC.UINT16,
+ 'fill_value': 9999,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, 4),
+ 'scale_factor': 2,
+ 'add_offset': -1,
+ },
+
+ },
+ "MODIS_Simple_LandSea_Mask": {
+ 'data': data,
+ 'type': SDC.UINT16,
+ 'fill_value': 9999,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, 4),
+ 'scale_factor': 2,
+ 'add_offset': -1,
+ },
+ },
+ "MODIS_Snow_Ice_Flag": {
+ 'data': data,
+ 'type': SDC.UINT16,
+ 'fill_value': 9999,
+ 'attrs': {
+ # dim_labels are just unique dimension names, may not match exactly with real world files
+ 'dim_labels': [row_dim_name,
+ col_dim_name],
+ 'valid_range': (0, 2),
+ 'scale_factor': 2,
+ 'add_offset': -1,
+ },
+ },
+ }
+
+
+def generate_nasa_l2_filename(prefix: str) -> str:
+ """Generate a file name that follows MODIS 35 L2 convention in a temporary directory."""
+ now = datetime.now()
+ return f'{prefix}_L2.A{now:%Y%j.%H%M}.061.{now:%Y%j%H%M%S}.hdf'
+
+
+@pytest.fixture(scope="session")
+def modis_l2_nasa_mod35_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD35 L2 HDF4 file with headers."""
+ filename = generate_nasa_l2_filename("MOD35")
+ full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True)
+ variable_infos.update(_get_cloud_mask_variable_info("Cloud_Mask", 1000))
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD35")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l2_nasa_mod35_mod03_files(modis_l2_nasa_mod35_file, modis_l1b_nasa_mod03_file) -> list[str]:
+ """Create a MOD35 L2 HDF4 file and MOD03 L1b geolocation file."""
+ return modis_l2_nasa_mod35_file + modis_l1b_nasa_mod03_file
+
+
+@pytest.fixture(scope="session")
+def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]:
+ """Create a single MOD06 L2 HDF4 file with headers."""
+ filename = generate_nasa_l2_filename("MOD06")
+ full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True)
+ variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000))
+ create_hdfeos_test_file(full_path, variable_infos, geo_resolution=5000, file_shortname="MOD06")
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]:
+ """Create a single IMAPP snowmask L2 HDF4 file with headers."""
+ filename = generate_imapp_filename("snowmask")
+ full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False)
+ variable_infos.update(_get_basic_variable_info("Snow_Mask", 1000))
+ create_hdfeos_test_file(full_path, variable_infos, include_metadata=False)
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l2_imapp_snowmask_geo_files(modis_l2_imapp_snowmask_file, modis_l1b_nasa_mod03_file) -> list[str]:
+ """Create the IMAPP snowmask and geo HDF4 files."""
+ return modis_l2_imapp_snowmask_file + modis_l1b_nasa_mod03_file
+
+
+@pytest.fixture(scope="session")
+def modis_l2_imapp_mask_byte1_file(tmpdir_factory) -> list[str]:
+ """Create a single IMAPP mask_byte1 L2 HDF4 file with headers."""
+ filename = generate_imapp_filename("mask_byte1")
+ full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename))
+ variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=False)
+ variable_infos.update(_get_mask_byte1_variable_info())
+ create_hdfeos_test_file(full_path, variable_infos, include_metadata=False)
+ return [full_path]
+
+
+@pytest.fixture(scope="session")
+def modis_l2_imapp_mask_byte1_geo_files(modis_l2_imapp_mask_byte1_file, modis_l1b_nasa_mod03_file) -> list[str]:
+ """Create the IMAPP mask_byte1 and geo HDF4 files."""
+ return modis_l2_imapp_mask_byte1_file + modis_l1b_nasa_mod03_file
diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py
new file mode 100644
index 0000000000..ca9e3fc66e
--- /dev/null
+++ b/satpy/tests/reader_tests/conftest.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Setup and configuration for all reader tests."""
+
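+# Importing the fixtures here exposes them to every test module in this
+# directory: pytest shares fixtures defined or imported in a conftest.py
+# with all tests below it, so individual test files need no explicit
+# imports.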
+from ._modis_fixtures import (
+ modis_l1b_imapp_1000m_file,
+ modis_l1b_imapp_geo_file,
+ modis_l1b_nasa_1km_mod03_files,
+ modis_l1b_nasa_mod02hkm_file,
+ modis_l1b_nasa_mod02qkm_file,
+ modis_l1b_nasa_mod03_file,
+ modis_l1b_nasa_mod021km_file,
+ modis_l2_imapp_mask_byte1_file,
+ modis_l2_imapp_mask_byte1_geo_files,
+ modis_l2_imapp_snowmask_file,
+ modis_l2_imapp_snowmask_geo_files,
+ modis_l2_nasa_mod06_file,
+ modis_l2_nasa_mod35_file,
+ modis_l2_nasa_mod35_mod03_files,
+)
diff --git a/satpy/tests/reader_tests/test_aapp_l1b.py b/satpy/tests/reader_tests/test_aapp_l1b.py
index 7475ec2086..c0f84c5a63 100644
--- a/satpy/tests/reader_tests/test_aapp_l1b.py
+++ b/satpy/tests/reader_tests/test_aapp_l1b.py
@@ -22,6 +22,7 @@
import tempfile
import unittest
from contextlib import suppress
+from unittest import mock
import numpy as np
@@ -105,12 +106,12 @@ def test_read(self):
for name in ['1', '2', '3a']:
key = make_dataid(name=name, calibration='reflectance')
res = fh.get_dataset(key, info)
- assert(res.min() == 0)
- assert(res.max() >= 100)
+ assert res.min() == 0
+ assert res.max() >= 100
mins.append(res.min().values)
maxs.append(res.max().values)
if name == '3a':
- assert(np.all(np.isnan(res[:2, :])))
+ assert np.all(np.isnan(res[:2, :]))
for name in ['3b', '4', '5']:
key = make_dataid(name=name, calibration='reflectance')
@@ -118,7 +119,7 @@ def test_read(self):
mins.append(res.min().values)
maxs.append(res.max().values)
if name == '3b':
- assert(np.all(np.isnan(res[2:, :])))
+ assert np.all(np.isnan(res[2:, :]))
np.testing.assert_allclose(mins, [0., 0., 0., 204.10106939, 103.23477235, 106.42609758])
np.testing.assert_allclose(maxs, [108.40391775, 107.68545158, 106.80061233,
@@ -135,7 +136,7 @@ def test_angles(self):
info = {}
key = make_dataid(name='solar_zenith_angle')
res = fh.get_dataset(key, info)
- assert(np.all(res == 0))
+ assert np.all(res == 0)
def test_navigation(self):
"""Test reading the lon and lats."""
@@ -148,10 +149,135 @@ def test_navigation(self):
info = {}
key = make_dataid(name='longitude')
res = fh.get_dataset(key, info)
- assert(np.all(res == 0))
+ assert np.all(res == 0)
key = make_dataid(name='latitude')
res = fh.get_dataset(key, info)
- assert(np.all(res == 0))
+ assert np.all(res == 0)
+
+ def test_interpolation(self):
+ """Test reading the lon and lats."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(22016, 0)
+ self._data.tofile(tmpfile)
+ fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info)
+ lons40km = np.array([
+ [-115.9773, -122.3054, -127.7482, -132.464, -136.5788, -140.1951,
+ -143.3961, -146.2497, -148.8112, -151.1259, -153.2309, -155.1568,
+ -156.9291, -158.5689, -160.0941, -161.5196, -162.8584, -164.1212,
+ -165.3176, -166.4557, -167.5426, -168.5846, -169.5872, -170.5555,
+ -171.4937, -172.406, -173.296, -174.1671, -175.0224, -175.865,
+ -176.6976, -177.523, -178.3439, -179.1628, -179.9825, 179.1944,
+ 178.3651, 177.5267, 176.6761, 175.8098, 174.9242, 174.0149,
+ 173.0773, 172.1057, 171.0935, 170.0326, 168.9128, 167.7211,
+ 166.4397, 165.0436, 163.4946],
+ [-115.9639, -122.2967, -127.7441, -132.4639, -136.5824, -140.2018,
+ -143.4055, -146.2614, -148.8249, -151.1413, -153.2478, -155.175,
+ -156.9484, -158.5892, -160.1152, -161.5415, -162.8809, -164.1443,
+ -165.3412, -166.4797, -167.567, -168.6094, -169.6123, -170.5808,
+ -171.5192, -172.4317, -173.3219, -174.1931, -175.0486, -175.8913,
+ -176.724, -177.5494, -178.3703, -179.1893, 179.991, 179.168,
+ 178.3388, 177.5005, 176.6499, 175.7838, 174.8983, 173.9892,
+ 173.0518, 172.0805, 171.0685, 170.0079, 168.8885, 167.6972,
+ 166.4164, 165.0209, 163.4726],
+ [-115.9504, -122.288, -127.7399, -132.4639, -136.5859, -140.2084,
+ -143.4148, -146.2731, -148.8386, -151.1567, -153.2647, -155.1932,
+ -156.9677, -158.6095, -160.1363, -161.5634, -162.9034, -164.1674,
+ -165.3648, -166.5038, -167.5915, -168.6341, -169.6374, -170.6061,
+ -171.5448, -172.4575, -173.3478, -174.2192, -175.0748, -175.9176,
+ -176.7503, -177.5758, -178.3968, -179.2157, 179.9646, 179.1416,
+ 178.3124, 177.4742, 176.6238, 175.7577, 174.8724, 173.9635,
+ 173.0263, 172.0552, 171.0436, 169.9833, 168.8643, 167.6734,
+ 166.3931, 164.9982, 163.4507]])
+ lats40km = np.array([
+ [78.6613, 78.9471, 79.0802, 79.1163, 79.0889, 79.019, 78.9202,
+ 78.8016, 78.6695, 78.528, 78.38, 78.2276, 78.0721, 77.9145,
+ 77.7553, 77.5949, 77.4335, 77.2712, 77.1079, 76.9435, 76.7779,
+ 76.6108, 76.4419, 76.2708, 76.0973, 75.921, 75.7412, 75.5576,
+ 75.3696, 75.1764, 74.9776, 74.7721, 74.5592, 74.3379, 74.1069,
+ 73.865, 73.6106, 73.342, 73.057, 72.7531, 72.4273, 72.076,
+ 71.6945, 71.2773, 70.8171, 70.3046, 69.7272, 69.0676, 68.3014,
+ 67.3914, 66.2778],
+ [78.6703, 78.9565, 79.0897, 79.1259, 79.0985, 79.0286, 78.9297,
+ 78.8111, 78.6789, 78.5373, 78.3892, 78.2367, 78.0811, 77.9233,
+ 77.764, 77.6035, 77.442, 77.2796, 77.1162, 76.9518, 76.7861,
+ 76.6188, 76.4498, 76.2787, 76.1051, 75.9287, 75.7488, 75.5651,
+ 75.377, 75.1838, 74.9848, 74.7793, 74.5663, 74.3448, 74.1138,
+ 73.8718, 73.6173, 73.3486, 73.0635, 72.7595, 72.4336, 72.0821,
+ 71.7005, 71.2832, 70.8229, 70.3102, 69.7326, 69.0729, 68.3065,
+ 67.3963, 66.2825],
+ [78.6794, 78.9658, 79.0993, 79.1355, 79.1082, 79.0381, 78.9392,
+ 78.8205, 78.6882, 78.5465, 78.3984, 78.2458, 78.0901, 77.9322,
+ 77.7728, 77.6122, 77.4506, 77.2881, 77.1246, 76.96, 76.7942,
+ 76.6269, 76.4578, 76.2866, 76.1129, 75.9364, 75.7564, 75.5727,
+ 75.3844, 75.1911, 74.9921, 74.7864, 74.5734, 74.3518, 74.1207,
+ 73.8786, 73.624, 73.3552, 73.0699, 72.7658, 72.4398, 72.0882,
+ 71.7065, 71.2891, 70.8286, 70.3158, 69.7381, 69.0782, 68.3116,
+ 67.4012, 66.2872]])
+ fh._get_coordinates_in_degrees = mock.MagicMock()
+ fh._get_coordinates_in_degrees.return_value = (lons40km, lats40km)
+ (lons, lats) = fh._get_all_interpolated_coordinates()
+ lon_data = lons.compute()
+ self.assertTrue(np.max(lon_data) <= 180)
+        # No longitudes between -110 and 110 in the input data
+ self.assertTrue(np.all(np.abs(lon_data) > 110))
+
+ def test_interpolation_angles(self):
+ """Test reading the lon and lats."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(22016, 0)
+ self._data.tofile(tmpfile)
+ fh = AVHRRAAPPL1BFile(tmpfile, self.filename_info, self.filetype_info)
+
+ sunz40km = np.array(
+ [[122.42, 121.72, 121.14, 120.63, 120.19, 119.79, 119.43, 119.1, 118.79, 118.51,
+ 118.24, 117.99, 117.76, 117.53, 117.31, 117.1, 116.9, 116.71, 116.52, 116.33,
+ 116.15, 115.97, 115.79, 115.61, 115.44, 115.26, 115.08, 114.91, 114.73, 114.55,
+ 114.36, 114.18, 113.98, 113.79, 113.58, 113.37, 113.15, 112.92, 112.68, 112.43,
+ 112.15, 111.87, 111.55, 111.22, 110.85, 110.44, 109.99, 109.47, 108.88, 108.18,
+ 107.33],
+ [122.41, 121.71, 121.13, 120.62, 120.18, 119.78, 119.42, 119.09, 118.78, 118.5,
+ 118.24, 117.99, 117.75, 117.52, 117.31, 117.1, 116.9, 116.7, 116.51, 116.32,
+ 116.14, 115.96, 115.78, 115.6, 115.43, 115.25, 115.08, 114.9, 114.72, 114.54,
+ 114.36, 114.17, 113.98, 113.78, 113.57, 113.36, 113.14, 112.91, 112.67, 112.42,
+ 112.15, 111.86, 111.55, 111.21, 110.84, 110.43, 109.98, 109.46, 108.87, 108.17,
+ 107.32]])
+ satz40km = np.array(
+ [[6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01,
+ 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01,
+ 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01,
+ 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00,
+ 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01,
+ 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01,
+ 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01,
+ 6.290e+01, 6.633e+01],
+ [6.623e+01, 6.281e+01, 5.960e+01, 5.655e+01, 5.360e+01, 5.075e+01, 4.797e+01,
+ 4.524e+01, 4.256e+01, 3.992e+01, 3.731e+01, 3.472e+01, 3.216e+01, 2.962e+01,
+ 2.710e+01, 2.460e+01, 2.210e+01, 1.962e+01, 1.714e+01, 1.467e+01, 1.221e+01,
+ 9.760e+00, 7.310e+00, 4.860e+00, 2.410e+00, 3.000e-02, 2.470e+00, 4.920e+00,
+ 7.370e+00, 9.820e+00, 1.227e+01, 1.474e+01, 1.720e+01, 1.968e+01, 2.216e+01,
+ 2.466e+01, 2.717e+01, 2.969e+01, 3.223e+01, 3.479e+01, 3.737e+01, 3.998e+01,
+ 4.263e+01, 4.531e+01, 4.804e+01, 5.082e+01, 5.368e+01, 5.662e+01, 5.969e+01,
+ 6.290e+01, 6.633e+01]])
+ azidiff40km = np.array([
+ [56.9, 56.24, 55.71, 55.27, 54.9, 54.57, 54.29, 54.03, 53.8, 53.59,
+ 53.4, 53.22, 53.05, 52.89, 52.74, 52.6, 52.47, 52.34, 52.22, 52.1,
+ 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.76, 128.86, 128.96,
+ 129.07, 129.17, 129.27, 129.38, 129.49, 129.6, 129.72, 129.83, 129.95, 130.08,
+ 130.21, 130.35, 130.5, 130.65, 130.81, 130.99, 131.18, 131.39, 131.63, 131.89,
+ 132.19],
+ [56.9, 56.24, 55.72, 55.28, 54.9, 54.58, 54.29, 54.03, 53.8, 53.59,
+ 53.4, 53.22, 53.05, 52.89, 52.75, 52.6, 52.47, 52.34, 52.22, 52.1,
+ 51.98, 51.87, 51.76, 51.65, 51.55, 128.55, 128.65, 128.75, 128.86, 128.96,
+ 129.06, 129.17, 129.27, 129.38, 129.49, 129.6, 129.71, 129.83, 129.95, 130.08,
+ 130.21, 130.35, 130.49, 130.65, 130.81, 130.99, 131.18, 131.39, 131.62, 131.89,
+ 132.19]])
+ fh._get_tiepoint_angles_in_degrees = mock.MagicMock()
+ fh._get_tiepoint_angles_in_degrees.return_value = (sunz40km, satz40km, azidiff40km)
+ (sunz, satz, azidiff) = fh._get_all_interpolated_angles()
+ self.assertTrue(np.max(sunz) <= 123)
+ self.assertTrue(np.max(satz) <= 70)
class TestAAPPL1BChannel3AMissing(unittest.TestCase):
@@ -257,7 +383,7 @@ class TestNegativeCalibrationSlope(unittest.TestCase):
def setUp(self):
"""Set up the test case."""
- from satpy.readers.aapp_l1b import _SCANTYPE, _HEADERTYPE
+ from satpy.readers.aapp_l1b import _HEADERTYPE, _SCANTYPE
calvis = np.array([[[617200000, -24330000, 1840000000, -632800000, 498], # calvis
[0, 0, 0, 0, 0],
[540000000, -21300002, 1610000000, -553699968, 501]],
diff --git a/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py
new file mode 100644
index 0000000000..4a981c7c3e
--- /dev/null
+++ b/satpy/tests/reader_tests/test_aapp_mhs_amsub_l1c.py
@@ -0,0 +1,500 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2021 Pytroll
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Test module for the MHS AAPP level-1c reader."""
+
+
+import datetime
+import tempfile
+import unittest
+
+import numpy as np
+
+from satpy.readers.aapp_mhs_amsub_l1c import _HEADERTYPE, _SCANTYPE, HEADER_LENGTH, MHS_AMSUB_AAPPL1CFile
+from satpy.tests.utils import make_dataid
+
+SCANLINE1 = [[26798, 27487, 23584, 24816, 26196],
+ [26188, 27392, 23453, 24832, 26223],
+ [23777, 26804, 23529, 24876, 26193],
+ [23311, 26781, 23583, 24898, 26157],
+ [23194, 26737, 23743, 24917, 26199],
+ [23811, 26871, 23836, 25017, 26239],
+ [25000, 27034, 23952, 25118, 26247],
+ [25937, 26988, 24019, 25058, 26143],
+ [25986, 26689, 24048, 25081, 25967],
+ [24689, 26083, 24062, 24975, 25744],
+ [23719, 25519, 24016, 24938, 25617],
+ [23327, 25565, 23882, 24960, 25571],
+ [23214, 25646, 23862, 24847, 25561],
+ [23473, 25886, 23859, 24832, 25640],
+ [23263, 25449, 23759, 24730, 25525],
+ [23335, 25672, 23716, 24727, 25578],
+ [23477, 25983, 23771, 24847, 25882],
+ [23141, 25863, 23758, 24971, 26066],
+ [23037, 25813, 23855, 25113, 26231],
+ [22908, 25701, 23958, 25130, 26226],
+ [22608, 25493, 23980, 25223, 26277],
+ [22262, 25275, 24019, 25356, 26247],
+ [21920, 25116, 24161, 25375, 26268],
+ [21559, 24795, 24169, 25351, 26284],
+ [21269, 24591, 24333, 25503, 26300],
+ [21028, 24395, 24413, 25498, 26300],
+ [20887, 24254, 24425, 25479, 26228],
+ [20882, 24288, 24440, 25463, 26284],
+ [20854, 24261, 24569, 25438, 26266],
+ [20911, 24277, 24564, 25464, 26213],
+ [21069, 24369, 24567, 25473, 26211],
+ [20994, 24201, 24747, 25552, 26130],
+ [21909, 24648, 24856, 25546, 26249],
+ [21936, 24662, 24843, 25612, 26207],
+ [21142, 24248, 24885, 25616, 26159],
+ [21180, 24251, 24817, 25553, 26114],
+ [21236, 24219, 24840, 25569, 26100],
+ [21057, 24152, 24735, 25535, 26093],
+ [20825, 24018, 24830, 25528, 26103],
+ [20731, 23866, 24789, 25579, 26117],
+ [20924, 23972, 24808, 25512, 26082],
+ [21228, 24259, 24723, 25501, 26071],
+ [21307, 24285, 24733, 25491, 26058],
+ [21558, 24521, 24739, 25511, 26009],
+ [21562, 24500, 24706, 25538, 26091],
+ [21568, 24448, 24639, 25504, 26011],
+ [21636, 24520, 24673, 25462, 26028],
+ [21895, 24667, 24662, 25494, 26048],
+ [22251, 24892, 24570, 25435, 25977],
+ [22459, 25109, 24557, 25340, 26010],
+ [22426, 25030, 24533, 25310, 25964],
+ [22419, 24966, 24528, 25316, 25953],
+ [22272, 24851, 24503, 25318, 25891],
+ [22261, 24799, 24548, 25326, 25912],
+ [22445, 25023, 24410, 25333, 25930],
+ [22371, 24902, 24381, 25323, 25892],
+ [21791, 24521, 24407, 25362, 25880],
+ [20930, 23820, 24440, 25287, 25849],
+ [21091, 24008, 24412, 25251, 25854],
+ [21575, 24331, 24405, 25272, 25774],
+ [21762, 24545, 24395, 25216, 25763],
+ [21891, 24550, 24317, 25256, 25790],
+ [21865, 24584, 24250, 25205, 25797],
+ [21431, 24178, 24302, 25228, 25738],
+ [21285, 23978, 24240, 25205, 25735],
+ [21935, 24515, 24232, 25240, 25834],
+ [22372, 24790, 24325, 25311, 25878],
+ [22621, 24953, 24410, 25395, 25897],
+ [23642, 25290, 24456, 25428, 25959],
+ [23871, 25209, 24376, 25369, 25976],
+ [22846, 24495, 24378, 25347, 25868],
+ [22490, 24320, 24327, 25374, 25849],
+ [23237, 24599, 24182, 25298, 25839],
+ [23134, 24601, 24121, 25306, 25864],
+ [22647, 24314, 24108, 25248, 25787],
+ [22499, 24293, 24049, 25165, 25823],
+ [22247, 23987, 23936, 25131, 25742],
+ [22291, 23942, 23908, 25028, 25715],
+ [22445, 24205, 23784, 24997, 25615],
+ [22487, 24417, 23764, 24921, 25643],
+ [22386, 24420, 23765, 24865, 25715],
+ [22217, 24326, 23748, 24823, 25617],
+ [21443, 23814, 23722, 24750, 25552],
+ [20354, 22599, 23580, 24722, 25439],
+ [20331, 22421, 23431, 24655, 25389],
+ [19925, 21855, 23412, 24623, 25284],
+ [20240, 22224, 23339, 24545, 25329],
+ [20368, 22596, 23419, 24474, 25362],
+ [20954, 23192, 23345, 24416, 25403],
+ [22292, 24303, 23306, 24330, 25353]]
+
+ANGLES_SCLINE1 = [[5926, 35786, 7682, 23367],
+ [5769, 35780, 7709, 23352],
+ [5614, 35774, 7733, 23339],
+ [5463, 35769, 7756, 23326],
+ [5314, 35763, 7777, 23313],
+ [5167, 35758, 7797, 23302],
+ [5022, 35753, 7816, 23290],
+ [4879, 35747, 7834, 23280],
+ [4738, 35742, 7851, 23269],
+ [4598, 35737, 7868, 23259],
+ [4459, 35732, 7883, 23249],
+ [4321, 35727, 7899, 23240],
+ [4185, 35721, 7913, 23231],
+ [4049, 35716, 7927, 23222],
+ [3914, 35711, 7940, 23213],
+ [3780, 35706, 7953, 23204],
+ [3647, 35701, 7966, 23195],
+ [3515, 35695, 7978, 23187],
+ [3383, 35690, 7990, 23179],
+ [3252, 35685, 8001, 23170],
+ [3121, 35680, 8013, 23162],
+ [2991, 35674, 8023, 23154],
+ [2861, 35669, 8034, 23146],
+ [2732, 35663, 8045, 23138],
+ [2603, 35658, 8055, 23130],
+ [2474, 35652, 8065, 23122],
+ [2346, 35647, 8075, 23114],
+ [2218, 35641, 8084, 23106],
+ [2090, 35635, 8094, 23098],
+ [1963, 35630, 8103, 23090],
+ [1836, 35624, 8112, 23082],
+ [1709, 35618, 8121, 23074],
+ [1582, 35612, 8130, 23066],
+ [1455, 35605, 8139, 23057],
+ [1329, 35599, 8148, 23049],
+ [1203, 35593, 8157, 23041],
+ [1077, 35586, 8165, 23032],
+ [951, 35580, 8174, 23023],
+ [825, 35573, 8182, 23014],
+ [699, 35566, 8191, 23005],
+ [573, 35560, 8199, 22996],
+ [448, 35553, 8208, 22987],
+ [322, 35548, 8216, 22977],
+ [196, 35545, 8224, 22968],
+ [71, 35561, 8233, 22958],
+ [54, 17463, 8241, 22947],
+ [179, 17489, 8249, 22937],
+ [305, 17486, 8258, 22926],
+ [431, 17479, 8266, 22915],
+ [556, 17471, 8275, 22903],
+ [682, 17461, 8283, 22891],
+ [808, 17451, 8291, 22879],
+ [934, 17440, 8300, 22866],
+ [1060, 17428, 8309, 22853],
+ [1186, 17416, 8317, 22839],
+ [1312, 17403, 8326, 22824],
+ [1438, 17390, 8335, 22809],
+ [1565, 17375, 8344, 22793],
+ [1692, 17360, 8353, 22776],
+ [1818, 17344, 8362, 22759],
+ [1946, 17327, 8371, 22740],
+ [2073, 17309, 8381, 22720],
+ [2201, 17289, 8390, 22699],
+ [2329, 17268, 8400, 22676],
+ [2457, 17245, 8410, 22652],
+ [2585, 17220, 8420, 22625],
+ [2714, 17194, 8431, 22597],
+ [2843, 17164, 8441, 22566],
+ [2973, 17132, 8452, 22533],
+ [3103, 17097, 8463, 22496],
+ [3234, 17058, 8475, 22455],
+ [3365, 17014, 8486, 22410],
+ [3497, 16965, 8498, 22359],
+ [3629, 16909, 8511, 22301],
+ [3762, 16844, 8524, 22236],
+ [3896, 16770, 8537, 22160],
+ [4031, 16683, 8551, 22071],
+ [4166, 16578, 8565, 21965],
+ [4303, 16452, 8580, 21837],
+ [4440, 16295, 8595, 21679],
+ [4579, 16096, 8611, 21478],
+ [4718, 15835, 8628, 21215],
+ [4860, 15477, 8646, 20856],
+ [5003, 14963, 8665, 20341],
+ [5147, 14178, 8684, 19553],
+ [5294, 12897, 8705, 18270],
+ [5442, 10778, 8727, 16150],
+ [5593, 7879, 8751, 13250],
+ [5747, 5305, 8776, 10674],
+ [5904, 3659, 8803, 9027]]
+
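+# Latitude/longitude pairs for scan line 1, stored as integers in 1e-4
+# degrees (e.g. [715994, 787602] -> 71.5994N, 78.7602E).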
+LATLON_SCLINE1 = [[715994, 787602],
+ [720651, 786999],
+ [724976, 786407],
+ [729013, 785827],
+ [732799, 785255],
+ [736362, 784692],
+ [739728, 784134],
+ [742919, 783583],
+ [745953, 783035],
+ [748844, 782492],
+ [751607, 781951],
+ [754254, 781412],
+ [756796, 780875],
+ [759240, 780338],
+ [761597, 779801],
+ [763872, 779264],
+ [766073, 778726],
+ [768206, 778186],
+ [770275, 777644],
+ [772287, 777100],
+ [774245, 776552],
+ [776153, 776000],
+ [778015, 775444],
+ [779836, 774882],
+ [781617, 774316],
+ [783361, 773743],
+ [785073, 773163],
+ [786753, 772576],
+ [788405, 771981],
+ [790031, 771377],
+ [791633, 770764],
+ [793212, 770140],
+ [794771, 769506],
+ [796312, 768860],
+ [797837, 768201],
+ [799346, 767528],
+ [800842, 766841],
+ [802326, 766138],
+ [803799, 765419],
+ [805264, 764681],
+ [806721, 763924],
+ [808171, 763147],
+ [809617, 762347],
+ [811060, 761523],
+ [812500, 760673],
+ [813939, 759796],
+ [815378, 758888],
+ [816819, 757949],
+ [818263, 756974],
+ [819712, 755962],
+ [821166, 754909],
+ [822627, 753812],
+ [824096, 752666],
+ [825575, 751468],
+ [827065, 750213],
+ [828567, 748894],
+ [830084, 747507],
+ [831617, 746043],
+ [833167, 744496],
+ [834736, 742855],
+ [836327, 741112],
+ [837940, 739253],
+ [839578, 737265],
+ [841243, 735132],
+ [842938, 732835],
+ [844665, 730352],
+ [846425, 727656],
+ [848223, 724716],
+ [850060, 721492],
+ [851941, 717939],
+ [853868, 713998],
+ [855845, 709597],
+ [857875, 704644],
+ [859963, 699024],
+ [862113, 692583],
+ [864329, 685119],
+ [866616, 676358],
+ [868979, 665918],
+ [871421, 653256],
+ [873947, 637570],
+ [876557, 617626],
+ [879250, 591448],
+ [882013, 555681],
+ [884815, 504285],
+ [887577, 425703],
+ [890102, 297538],
+ [891907, 85636],
+ [892134, -204309],
+ [890331, -461741],
+ [887022, -626300]]
+
+
+class TestMHS_AMSUB_AAPPL1CReadData(unittest.TestCase):
+ """Test the filehandler."""
+
+ def setUp(self):
+ """Set up the test case."""
+ self._header = np.zeros(1, dtype=_HEADERTYPE)
+ self._header['satid'][0] = 3
+ self._header['instrument'][0] = 12
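+        # Temperature-to-radiance conversion coefficients: five (central
+        # wavenumber, band-correction intercept, band-correction slope)
+        # triplets scaled by 1e6, flattened into the 3x5 header layout
+        # (e.g. 2968720 -> 2.96872 cm^-1 for the 89 GHz channel).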
+ self._header['tempradcnv'][0] = [[2968720, 0, 1000000, 5236956, 0],
+ [1000000, 6114597, 0, 1000000, 6114597],
+ [-3100, 1000270, 6348092, 0, 1000000]]
+ self._data = np.zeros(3, dtype=_SCANTYPE)
+ self._data['scnlinyr'][:] = 2020
+ self._data['scnlindy'][:] = 261
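+        # Scan line times are milliseconds since midnight: 36368496 ms is
+        # 10:06:08 UTC, matching the 10:06 start time in the filename info.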
+ self._data['scnlintime'][0] = 36368496
+ self._data['scnlintime'][1] = 36371163
+ self._data['scnlintime'][2] = 36373830
+ self._data['qualind'][0] = 0
+ self._data['qualind'][1] = 0
+ self._data['qualind'][2] = 0
+ self._data['scnlinqual'][0] = 16384
+ self._data['scnlinqual'][1] = 16384
+ self._data['scnlinqual'][2] = 16384
+ self._data['chanqual'][0] = [6, 6, 6, 6, 6]
+ self._data['chanqual'][1] = [6, 6, 6, 6, 6]
+ self._data['chanqual'][2] = [6, 6, 6, 6, 6]
+ self._data['instrtemp'][:] = [29520, 29520, 29520]
+        self._data['dataqual'][:] = 0  # no data-quality flags set
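+        # Spacecraft altitude, apparently stored in tenths of km (8321 -> 832.1 km).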
+ self._data['scalti'][0:3] = [8321, 8321, 8321]
+ self._data['latlon'][0] = LATLON_SCLINE1
+ self._data['angles'][0] = ANGLES_SCLINE1
+ self._data['btemps'][0] = SCANLINE1
+ self.filename_info = {'platform_shortname': 'metop01',
+ 'start_time': datetime.datetime(2020, 9, 17, 10, 6),
+ 'orbit_number': 41509}
+
+ self.filetype_info = {'file_reader': MHS_AMSUB_AAPPL1CFile,
+ 'file_patterns':
+ ['mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c'],
+ 'file_type': 'mhs_aapp_l1c'}
+
+ def test_platform_name(self):
+ """Test getting the platform name."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+
+ assert fh_.platform_name == 'Metop-C'
+
+ self._header['satid'][0] = 1
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+
+ assert fh_.platform_name == 'Metop-B'
+
+ def test_sensor_name(self):
+ """Test getting the sensor name."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+
+ assert fh_.sensor == 'mhs'
+
+ self._header['instrument'][0] = 11
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+
+ assert fh_.sensor == 'amsub'
+
+ self._header['instrument'][0] = 10
+
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+        with self.assertRaises(IOError):
+            MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+
+    def test_read(self):
+        """Test reading the brightness temperature channels."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+
+ info = {}
+
+ chmin = [199.25, 218.55, 233.06, 243.3, 252.84]
+ chmax = [267.98, 274.87, 248.85, 256.16, 263.]
+ for chn, name in enumerate(['1', '2', '3', '4', '5']):
+ key = make_dataid(name=name, calibration='brightness_temperature')
+ res = fh_.get_dataset(key, info)
+
+ assert res.min() == chmin[chn]
+ assert res.max() == chmax[chn]
+
+ def test_angles(self):
+ """Test reading the angles."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+ info = {}
+ key = make_dataid(name='solar_zenith_angle')
+ res = fh_.get_dataset(key, info)
+
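+        # setUp only filled scan line 0; lines 1 and 2 remain all zeros.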
+ assert np.all(res[2] == 0)
+ assert np.all(res[1] == 0)
+ expected = np.array([76.82, 77.09, 77.33, 77.56, 77.77, 77.97, 78.16, 78.34, 78.51,
+ 78.68, 78.83, 78.99, 79.13, 79.27, 79.4, 79.53, 79.66, 79.78,
+ 79.9, 80.01, 80.13, 80.23, 80.34, 80.45, 80.55, 80.65, 80.75,
+ 80.84, 80.94, 81.03, 81.12, 81.21, 81.3, 81.39, 81.48, 81.57,
+ 81.65, 81.74, 81.82, 81.91, 81.99, 82.08, 82.16, 82.24, 82.33,
+ 82.41, 82.49, 82.58, 82.66, 82.75, 82.83, 82.91, 83., 83.09,
+ 83.17, 83.26, 83.35, 83.44, 83.53, 83.62, 83.71, 83.81, 83.9,
+ 84., 84.1, 84.2, 84.31, 84.41, 84.52, 84.63, 84.75, 84.86,
+ 84.98, 85.11, 85.24, 85.37, 85.51, 85.65, 85.8, 85.95, 86.11,
+ 86.28, 86.46, 86.65, 86.84, 87.05, 87.27, 87.51, 87.76, 88.03])
+
+ np.testing.assert_allclose(res[0], expected)
+
+ def test_navigation(self):
+ """Test reading the longitudes and latitudes."""
+ with tempfile.TemporaryFile() as tmpfile:
+ self._header.tofile(tmpfile)
+ tmpfile.seek(HEADER_LENGTH, 0)
+ self._data.tofile(tmpfile)
+
+ fh_ = MHS_AMSUB_AAPPL1CFile(tmpfile, self.filename_info, self.filetype_info)
+ info = {}
+ key = make_dataid(name='longitude')
+ res = fh_.get_dataset(key, info)
+
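+        # As in test_angles, only scan line 0 carries data.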
+ assert np.all(res[2] == 0)
+ assert np.all(res[1] == 0)
+ expected = np.array([78.7602, 78.6999, 78.6407, 78.5827, 78.5255, 78.4692,
+ 78.4134, 78.3583, 78.3035, 78.2492, 78.1951, 78.1412,
+ 78.0875, 78.0338, 77.9801, 77.9264, 77.8726, 77.8186,
+ 77.7644, 77.71, 77.6552, 77.6, 77.5444, 77.4882,
+ 77.4316, 77.3743, 77.3163, 77.2576, 77.1981, 77.1377,
+ 77.0764, 77.014, 76.9506, 76.886, 76.8201, 76.7528,
+ 76.6841, 76.6138, 76.5419, 76.4681, 76.3924, 76.3147,
+ 76.2347, 76.1523, 76.0673, 75.9796, 75.8888, 75.7949,
+ 75.6974, 75.5962, 75.4909, 75.3812, 75.2666, 75.1468,
+ 75.0213, 74.8894, 74.7507, 74.6043, 74.4496, 74.2855,
+ 74.1112, 73.9253, 73.7265, 73.5132, 73.2835, 73.0352,
+ 72.7656, 72.4716, 72.1492, 71.7939, 71.3998, 70.9597,
+ 70.4644, 69.9024, 69.2583, 68.5119, 67.6358, 66.5918,
+ 65.3256, 63.757, 61.7626, 59.1448, 55.5681, 50.4285,
+ 42.5703, 29.7538, 8.5636, -20.4309, -46.1741, -62.63])
+
+ np.testing.assert_allclose(res[0], expected)
+
+ key = make_dataid(name='latitude')
+ res = fh_.get_dataset(key, info)
+
+ assert np.all(res[2] == 0)
+ assert np.all(res[1] == 0)
+ expected = np.array([71.5994, 72.0651, 72.4976, 72.9013, 73.2799, 73.6362, 73.9728,
+ 74.2919, 74.5953, 74.8844, 75.1607, 75.4254, 75.6796, 75.924,
+ 76.1597, 76.3872, 76.6073, 76.8206, 77.0275, 77.2287, 77.4245,
+ 77.6153, 77.8015, 77.9836, 78.1617, 78.3361, 78.5073, 78.6753,
+ 78.8405, 79.0031, 79.1633, 79.3212, 79.4771, 79.6312, 79.7837,
+ 79.9346, 80.0842, 80.2326, 80.3799, 80.5264, 80.6721, 80.8171,
+ 80.9617, 81.106, 81.25, 81.3939, 81.5378, 81.6819, 81.8263,
+ 81.9712, 82.1166, 82.2627, 82.4096, 82.5575, 82.7065, 82.8567,
+ 83.0084, 83.1617, 83.3167, 83.4736, 83.6327, 83.794, 83.9578,
+ 84.1243, 84.2938, 84.4665, 84.6425, 84.8223, 85.006, 85.1941,
+ 85.3868, 85.5845, 85.7875, 85.9963, 86.2113, 86.4329, 86.6616,
+ 86.8979, 87.1421, 87.3947, 87.6557, 87.925, 88.2013, 88.4815,
+ 88.7577, 89.0102, 89.1907, 89.2134, 89.0331, 88.7022])
+
+ np.testing.assert_allclose(res[0], expected)
diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py
index 6ea4a3599e..b8ad4400cb 100644
--- a/satpy/tests/reader_tests/test_abi_l1b.py
+++ b/satpy/tests/reader_tests/test_abi_l1b.py
@@ -20,8 +20,8 @@
import unittest
from unittest import mock
-import pytest
import numpy as np
+import pytest
import xarray as xr
from satpy.tests.utils import make_dataid
@@ -269,6 +269,67 @@ def test_vis_calibrate(self):
'Bidirectional Reflectance')
+class Test_NC_ABI_L1B_raw_cal(Test_NC_ABI_L1B_Base):
+ """Test the NC_ABI_L1B reader raw calibration."""
+
+ def setUp(self):
+ """Create fake data for the tests."""
+ rad_data = (np.arange(10.).reshape((2, 5)) + 1.)
+ rad_data = (rad_data + 1.) / 0.5
+ rad_data = rad_data.astype(np.int16)
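+        # The two lines above apply the inverse of the CF scaling declared
+        # below (scale_factor=0.5, add_offset=-1), so the stored int16 counts
+        # are 4, 6, ..., 22 and decode back to radiances 1..10.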
+ rad = xr.DataArray(
+ rad_data,
+ dims=('y', 'x'),
+ attrs={
+ 'scale_factor': 0.5,
+ 'add_offset': -1.,
+ '_FillValue': 20,
+ }
+ )
+ super(Test_NC_ABI_L1B_raw_cal, self).setUp(rad=rad)
+
+ def test_raw_calibrate(self):
+ """Test RAW calibration."""
+ res = self.reader.get_dataset(
+ make_dataid(name='C05', calibration='counts'), {})
+
+        # The counts calibration should return the stored int16 data
+        # unchanged: setUp wrote ((arange(10) + 1) + 1) / 0.5, i.e. 4, 6, ..., 22.
+        expected = np.arange(4, 24, 2, dtype=np.int16).reshape((2, 5))
+        self.assertTrue(np.allclose(res.data, expected, equal_nan=True))
+
+ # check for the presence of typical attributes
+ self.assertIn('scale_factor', res.attrs)
+ self.assertIn('add_offset', res.attrs)
+ self.assertIn('_FillValue', res.attrs)
+ self.assertIn('orbital_parameters', res.attrs)
+ self.assertIn('platform_shortname', res.attrs)
+ self.assertIn('scene_id', res.attrs)
+
+ # determine if things match their expected values/types.
+ self.assertEqual(res.data.dtype, np.int16, "int16 data type expected")
+ self.assertEqual(res.attrs['standard_name'],
+ 'counts')
+ self.assertEqual(res.attrs['long_name'],
+ 'Raw Counts')
+
+
+class Test_NC_ABI_L1B_invalid_cal(Test_NC_ABI_L1B_Base):
+ """Test the NC_ABI_L1B reader with invalid calibration."""
+
+ def test_invalid_calibration(self):
+ """Test detection of invalid calibration values."""
+ # Need to use a custom DataID class because the real DataID class is
+ # smart enough to detect the invalid calibration before the ABI L1B
+ # get_dataset method gets a chance to run.
+ class FakeDataID(dict):
+ def to_dict(self):
+ return self
+
+ with self.assertRaises(ValueError, msg='Did not detect invalid cal'):
+ did = FakeDataID(name='C05', calibration='invalid', modifiers=())
+ self.reader.get_dataset(did, {})
+
+
class Test_NC_ABI_File(unittest.TestCase):
"""Test file opening."""
diff --git a/satpy/tests/reader_tests/test_abi_l2_nc.py b/satpy/tests/reader_tests/test_abi_l2_nc.py
index 3c1ce163a7..63014685f9 100644
--- a/satpy/tests/reader_tests/test_abi_l2_nc.py
+++ b/satpy/tests/reader_tests/test_abi_l2_nc.py
@@ -16,71 +16,105 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""The abi_l2_nc reader tests package."""
-import numpy as np
-import xarray as xr
import unittest
from unittest import mock
+import numpy as np
+import xarray as xr
-class Test_NC_ABI_L2_base(unittest.TestCase):
- """Test the NC_ABI_L2 reader."""
- @mock.patch('satpy.readers.abi_base.xr')
- def setUp(self, xr_):
- """Create fake data for the tests."""
- from satpy.readers.abi_l2_nc import NC_ABI_L2
+def _create_cmip_dataset():
+ proj = xr.DataArray(
+ [],
+ attrs={
+ 'semi_major_axis': 1.,
+ 'semi_minor_axis': 1.,
+ 'perspective_point_height': 1.,
+ 'longitude_of_projection_origin': -90.,
+ 'sweep_angle_axis': u'x'
+ }
+ )
+ x__ = xr.DataArray(
+ [0, 1],
+ attrs={'scale_factor': 2., 'add_offset': -1.},
+ dims=('x',),
+ )
+ y__ = xr.DataArray(
+ [0, 1],
+ attrs={'scale_factor': -2., 'add_offset': 1.},
+ dims=('y',),
+ )
- proj = xr.DataArray(
- [],
- attrs={
- 'semi_major_axis': 1.,
- 'semi_minor_axis': 1.,
- 'perspective_point_height': 1.,
- 'longitude_of_projection_origin': -90.,
- 'sweep_angle_axis': u'x'
- }
- )
- x__ = xr.DataArray(
- [0, 1],
- attrs={'scale_factor': 2., 'add_offset': -1.},
- dims=('x',),
- )
- y__ = xr.DataArray(
- [0, 1],
- attrs={'scale_factor': -2., 'add_offset': 1.},
- dims=('y',),
- )
+ ht_da = xr.DataArray(np.array([2, -1, -32768, 32767]).astype(np.int16).reshape((2, 2)),
+ dims=('y', 'x'),
+ attrs={'scale_factor': 0.3052037,
+ 'add_offset': 0.,
+ '_FillValue': np.array(-1).astype(np.int16),
+ '_Unsigned': 'True',
+ 'units': 'm'},)
- ht_da = xr.DataArray(np.array([2, -1, -32768, 32767]).astype(np.int16).reshape((2, 2)),
- dims=('y', 'x'),
- attrs={'scale_factor': 0.3052037,
- 'add_offset': 0.,
- '_FillValue': np.array(-1).astype(np.int16),
- '_Unsigned': 'True',
- 'units': 'm'},)
+ fake_dataset = xr.Dataset(
+ data_vars={
+ 'goes_imager_projection': proj,
+ 'x': x__,
+ 'y': y__,
+ 'HT': ht_da,
+ "nominal_satellite_subpoint_lat": np.array(0.0),
+ "nominal_satellite_subpoint_lon": np.array(-89.5),
+ "nominal_satellite_height": np.array(35786.02),
+ "spatial_resolution": "10km at nadir",
- fake_dataset = xr.Dataset(
- data_vars={
- 'goes_imager_projection': proj,
- 'x': x__,
- 'y': y__,
- 'HT': ht_da,
- "nominal_satellite_subpoint_lat": np.array(0.0),
- "nominal_satellite_subpoint_lon": np.array(-89.5),
- "nominal_satellite_height": np.array(35786.02),
- "spatial_resolution": "10km at nadir",
+ },
+ attrs={
+ "time_coverage_start": "2017-09-20T17:30:40.8Z",
+ "time_coverage_end": "2017-09-20T17:41:17.5Z",
+ "spatial_resolution": "2km at nadir",
+ }
+ )
+ return fake_dataset
- },
- attrs={
- "time_coverage_start": "2017-09-20T17:30:40.8Z",
- "time_coverage_end": "2017-09-20T17:41:17.5Z",
- }
- )
- xr_.open_dataset.return_value = fake_dataset
- self.reader = NC_ABI_L2('filename',
- {'platform_shortname': 'G16', 'observation_type': 'HT',
- 'scan_mode': 'M3'},
- {'filetype': 'info'})
+
+def _compare_subdict(actual_dict, exp_sub_dict):
+ for key, value in exp_sub_dict.items():
+ assert key in actual_dict
+ assert actual_dict[key] == value
+
+
+def _assert_orbital_parameters(orb_params):
+ assert orb_params['satellite_nominal_longitude'] == -89.5
+ assert orb_params['satellite_nominal_latitude'] == 0.0
+ assert orb_params['satellite_nominal_altitude'] == 35786020.0
+
+
+def _create_mcmip_dataset():
+ fake_dataset = _create_cmip_dataset()
+ fake_dataset = fake_dataset.copy(deep=True)
+ fake_dataset['CMI_C14'] = fake_dataset['HT']
+ del fake_dataset['HT']
+ return fake_dataset
+
+
+class Test_NC_ABI_L2_base(unittest.TestCase):
+ """Test the NC_ABI_L2 reader."""
+
+ def setUp(self):
+ """Create fake data for the tests."""
+ from satpy.readers.abi_l2_nc import NC_ABI_L2
+ fake_cmip_dataset = _create_cmip_dataset()
+ with mock.patch('satpy.readers.abi_base.xr') as xr_:
+ xr_.open_dataset.return_value = fake_cmip_dataset
+ self.reader = NC_ABI_L2(
+ 'filename',
+ {
+ 'platform_shortname': 'G16',
+ 'scan_mode': 'M3',
+ 'scene_abbr': 'M1',
+ },
+ {
+ 'file_type': 'info',
+ 'observation_type': 'ACHA',
+ },
+ )
class Test_NC_ABI_L2_get_dataset(Test_NC_ABI_L2_base):
@@ -98,21 +132,70 @@ def test_get_dataset(self):
exp_attrs = {'instrument_ID': None,
'modifiers': (),
'name': 'HT',
+ 'observation_type': 'ACHA',
'orbital_slot': None,
'platform_name': 'GOES-16',
'platform_shortname': 'G16',
'production_site': None,
- 'satellite_altitude': 35786020.,
- 'satellite_latitude': 0.0,
- 'satellite_longitude': -89.5,
'scan_mode': 'M3',
+ 'scene_abbr': 'M1',
'scene_id': None,
'sensor': 'abi',
'timeline_ID': None,
'units': 'm'}
self.assertTrue(np.allclose(res.data, exp_data, equal_nan=True))
- self.assertDictEqual(dict(res.attrs), exp_attrs)
+ _compare_subdict(res.attrs, exp_attrs)
+ _assert_orbital_parameters(res.attrs['orbital_parameters'])
+
+
+class TestMCMIPReading:
+ """Test cases of the MCMIP file format."""
+
+ @mock.patch('satpy.readers.abi_base.xr')
+ def test_mcmip_get_dataset(self, xr_):
+ """Test getting channel from MCMIP file."""
+        from datetime import datetime
+
+        from pyresample.geometry import AreaDefinition
+
+        from satpy import Scene
+        from satpy.dataset.dataid import WavelengthRange
+        xr_.open_dataset.return_value = _create_mcmip_dataset()
+
+        fn = "OR_ABI-L2-MCMIPF-M6_G16_s20192600241149_e20192600243534_c20192600245360.nc"
+        scn = Scene(reader='abi_l2_nc', filenames=[fn])
+        scn.load(['C14'])
+
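+        # With _Unsigned='True', the stored int16 -32768 is read back as
+        # 32768, and the _FillValue of -1 (65535 unsigned) becomes NaN.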
+        exp_data = np.array([[2 * 0.3052037, np.nan],
+                             [32768 * 0.3052037, 32767 * 0.3052037]])
+
+        exp_attrs = {'instrument_ID': None,
+                     'modifiers': (),
+                     'name': 'C14',
+                     'observation_type': 'MCMIP',
+                     'orbital_slot': None,
+                     'reader': 'abi_l2_nc',
+                     'platform_name': 'GOES-16',
+                     'platform_shortname': 'G16',
+                     'production_site': None,
+                     'scan_mode': 'M6',
+                     'scene_abbr': 'F',
+                     'scene_id': None,
+                     'sensor': 'abi',
+                     'timeline_ID': None,
+                     'start_time': datetime(2017, 9, 20, 17, 30, 40, 800000),
+                     'end_time': datetime(2017, 9, 20, 17, 41, 17, 500000),
+                     'calibration': 'brightness_temperature',
+                     'ancillary_variables': [],
+                     'wavelength': WavelengthRange(10.8, 11.2, 11.6, unit='µm'),
+                     'units': 'm'}
+
+        res = scn['C14']
+        np.testing.assert_allclose(res.data, exp_data, equal_nan=True)
+        assert isinstance(res.attrs['area'], AreaDefinition)
+        _compare_subdict(res.attrs, exp_attrs)
+        _assert_orbital_parameters(res.attrs['orbital_parameters'])
class Test_NC_ABI_L2_area_fixedgrid(Test_NC_ABI_L2_base):
@@ -196,3 +279,69 @@ def test_get_area_def_latlon(self, adef):
self.assertEqual(call_args[4], self.reader.ncols)
self.assertEqual(call_args[5], self.reader.nlines)
np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20))
+
+
+class Test_NC_ABI_L2_area_AOD(unittest.TestCase):
+ """Test the NC_ABI_L2 reader for the AOD product."""
+
+ @mock.patch('satpy.readers.abi_base.xr')
+ def setUp(self, xr_):
+ """Create fake data for the tests."""
+ from satpy.readers.abi_l2_nc import NC_ABI_L2
+ proj = xr.DataArray(
+ [],
+ attrs={'semi_major_axis': 1.,
+ 'semi_minor_axis': 1.,
+ 'inverse_flattening': 1.,
+ 'longitude_of_prime_meridian': 0.0,
+ }
+ )
+
+ proj_ext = xr.DataArray(
+ [],
+ attrs={'geospatial_westbound_longitude': -85.0,
+ 'geospatial_eastbound_longitude': -65.0,
+ 'geospatial_northbound_latitude': 20.0,
+ 'geospatial_southbound_latitude': -20.0,
+ 'geospatial_lat_center': 0.0,
+ 'geospatial_lon_center': -75.0,
+ })
+
+ x__ = xr.DataArray(
+ [0, 1],
+ attrs={'scale_factor': 2., 'add_offset': -1.},
+ dims=('x',),
+ )
+ y__ = xr.DataArray(
+ [0, 1],
+ attrs={'scale_factor': -2., 'add_offset': 1.},
+ dims=('y',),
+ )
+ fake_dataset = xr.Dataset(
+ data_vars={
+ 'goes_lat_lon_projection': proj,
+ 'geospatial_lat_lon_extent': proj_ext,
+ 'x': x__,
+ 'y': y__,
+ 'RSR': xr.DataArray(np.ones((2, 2)), dims=('y', 'x')),
+ },
+ )
+ xr_.open_dataset.return_value = fake_dataset
+
+ self.reader = NC_ABI_L2('filename',
+ {'platform_shortname': 'G16', 'observation_type': 'RSR',
+ 'scene_abbr': 'C', 'scan_mode': 'M3'},
+ {'filetype': 'info'})
+
+ @mock.patch('satpy.readers.abi_base.geometry.AreaDefinition')
+ def test_get_area_def_xy(self, adef):
+ """Test the area generation."""
+ self.reader.get_area_def(None)
+
+ self.assertEqual(adef.call_count, 1)
+ call_args = tuple(adef.call_args)[0]
+ self.assertDictEqual(call_args[3], {'proj': 'latlong', 'a': 1.0, 'b': 1.0, 'fi': 1.0, 'pm': 0.0,
+ 'lon_0': -75.0, 'lat_0': 0.0})
+ self.assertEqual(call_args[4], self.reader.ncols)
+ self.assertEqual(call_args[5], self.reader.nlines)
+ np.testing.assert_allclose(call_args[6], (-85.0, -20.0, -65.0, 20))
diff --git a/satpy/tests/reader_tests/test_acspo.py b/satpy/tests/reader_tests/test_acspo.py
index b792d48f49..aca2609492 100644
--- a/satpy/tests/reader_tests/test_acspo.py
+++ b/satpy/tests/reader_tests/test_acspo.py
@@ -19,11 +19,13 @@
import os
from datetime import datetime, timedelta
+from unittest import mock
+
import numpy as np
+import pytest
+
from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
-import unittest
-from unittest import mock
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
@@ -44,6 +46,7 @@ def get_test_content(self, filename, filename_info, filetype_info):
dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0))
sat, inst = {
'VIIRS_NPP': ('NPP', 'VIIRS'),
+ 'VIIRS_N20': ('N20', 'VIIRS'),
}[filename_info['sensor_id']]
file_content = {
@@ -92,16 +95,16 @@ def get_test_content(self, filename, filename_info, filetype_info):
(1, DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1]),
dtype=np.uint16)
- convert_file_content_to_data_array(file_content)
+ convert_file_content_to_data_array(file_content, dims=("time", "nj", "ni"))
return file_content
-class TestACSPOReader(unittest.TestCase):
+class TestACSPOReader:
"""Test ACSPO Reader."""
yaml_file = "acspo.yaml"
- def setUp(self):
+ def setup_method(self):
"""Wrap NetCDF4 file handler with our own fake handler."""
from satpy._config import config_search_paths
from satpy.readers.acspo import ACSPOFileHandler
@@ -111,21 +114,26 @@ def setUp(self):
self.fake_handler = self.p.start()
self.p.is_local = True
- def tearDown(self):
+ def teardown_method(self):
"""Stop wrapping the NetCDF4 file handler."""
self.p.stop()
- def test_init(self):
+ @pytest.mark.parametrize(
+ ("filename",),
+ [
+ ["20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc"],
+ ["20210916161708-STAR-L2P_GHRSST-SSTsubskin-VIIRS_N20-ACSPO_V2.80-v02.0-fv01.0.nc"],
+ ]
+ )
+ def test_init(self, filename):
"""Test basic init with no extra parameters."""
from satpy.readers import load_reader
r = load_reader(self.reader_configs)
- loadables = r.select_files_from_pathnames([
- '20170401174600-STAR-L2P_GHRSST-SSTskin-VIIRS_NPP-ACSPO_V2.40-v02.0-fv01.0.nc',
- ])
- self.assertEqual(len(loadables), 1)
+ loadables = r.select_files_from_pathnames([filename])
+ assert len(loadables) == 1
r.create_filehandlers(loadables)
# make sure we have some files
- self.assertTrue(r.file_handlers)
+ assert r.file_handlers
def test_load_every_dataset(self):
"""Test loading all datasets."""
@@ -139,6 +147,9 @@ def test_load_every_dataset(self):
'satellite_zenith_angle',
'sea_ice_fraction',
'wind_speed'])
- self.assertEqual(len(datasets), 4)
+ assert len(datasets) == 4
for d in datasets.values():
- self.assertTupleEqual(d.shape, DEFAULT_FILE_SHAPE)
+ assert d.shape == DEFAULT_FILE_SHAPE
+ assert d.dims == ("y", "x")
+ assert d.attrs["sensor"] == "viirs"
+ assert d.attrs["rows_per_scan"] == 16
diff --git a/satpy/tests/reader_tests/test_agri_l1.py b/satpy/tests/reader_tests/test_agri_l1.py
index de39992f15..722ea05a71 100644
--- a/satpy/tests/reader_tests/test_agri_l1.py
+++ b/satpy/tests/reader_tests/test_agri_l1.py
@@ -18,7 +18,6 @@
"""The agri_l1 reader tests package."""
import os
-import unittest
from unittest import mock
import dask.array as da
@@ -28,6 +27,31 @@
from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
+ALL_BAND_NAMES = ["C01", "C02", "C03", "C04", "C05", "C06", "C07",
+ "C08", "C09", "C10", "C11", "C12", "C13", "C14"]
+
+CHANNELS_BY_RESOLUTION = {500: ["C02"],
+ 1000: ["C01", "C02", "C03"],
+ 2000: ["C01", "C02", "C03", "C04", "C05", "C06", "C07"],
+ 4000: ALL_BAND_NAMES,
+ 'GEO': 'solar_azimuth_angle'
+ }
+
+RESOLUTION_LIST = [500, 1000, 2000, 4000]
+
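+# FY4A and FY4B currently share the same fake-grid area extents at every resolution.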
+AREA_EXTENTS_BY_RESOLUTION = {
+    'FY4A': {
+        500: (-5495271.006002, -5496021.008869, -5493270.998357, -5495521.006957),
+        1000: (-5494521.070252, -5496021.076004, -5490521.054912, -5495021.072169),
+        2000: (-5493021.198696, -5496021.210274, -5485021.167823, -5494021.202556),
+        4000: (-5490021.187119, -5496021.210274, -5474021.125371, -5492021.194837),
+    },
+    'FY4B': {
+        500: (-5495271.006002, -5496021.008869, -5493270.998357, -5495521.006957),
+        1000: (-5494521.070252, -5496021.076004, -5490521.054912, -5495021.072169),
+        2000: (-5493021.198696, -5496021.210274, -5485021.167823, -5494021.202556),
+        4000: (-5490021.187119, -5496021.210274, -5474021.125371, -5492021.194837),
+    }}
+
class FakeHDF5FileHandler2(FakeHDF5FileHandler):
"""Swap-in HDF5 File Handler."""
@@ -36,161 +60,140 @@ def make_test_data(self, cwl, ch, prefix, dims, file_type):
"""Make test data."""
if prefix == 'CAL':
data = xr.DataArray(
- da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]),
- attrs={
- 'Slope': 1., 'Intercept': 0.,
- 'FillValue': -65535.0,
- 'units': 'NUL',
- 'center_wavelength': '{}um'.format(cwl).encode('utf-8'),
- 'band_names': 'band{}(band number is range from 1 to 14)'
- .format(ch).encode('utf-8'),
- 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'),
- 'valid_range': [0, 1.5],
- },
- dims=('_const'))
+ da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]),
+ attrs={
+ 'Slope': np.array(1.), 'Intercept': np.array(0.),
+ 'FillValue': np.array(-65535.0),
+ 'units': 'NUL',
+ 'center_wavelength': '{}um'.format(cwl).encode('utf-8'),
+ 'band_names': 'band{}(band number is range from 1 to 14)'
+ .format(ch).encode('utf-8'),
+ 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'),
+ 'valid_range': np.array([0, 1.5]),
+ },
+ dims='_const')
elif prefix == 'NOM':
data = xr.DataArray(
- da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1,
- [dim for dim in dims]),
- attrs={
- 'Slope': 1., 'Intercept': 0.,
- 'FillValue': 65535,
- 'units': 'DN',
- 'center_wavelength': '{}um'.format(cwl).encode('utf-8'),
- 'band_names': 'band{}(band number is range from 1 to 14)'
- .format(ch).encode('utf-8'),
- 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'),
- 'valid_range': [0, 4095],
- },
- dims=('_RegLength', '_RegWidth'))
+ da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1,
+ [dim for dim in dims]),
+ attrs={
+ 'Slope': np.array(1.), 'Intercept': np.array(0.),
+ 'FillValue': np.array(65535),
+ 'units': 'DN',
+ 'center_wavelength': '{}um'.format(cwl).encode('utf-8'),
+ 'band_names': 'band{}(band number is range from 1 to 14)'
+ .format(ch).encode('utf-8'),
+ 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'),
+ 'valid_range': np.array([0, 4095]),
+ },
+ dims=('_RegLength', '_RegWidth'))
+
+ elif prefix == 'GEO':
+ data = xr.DataArray(
+ da.from_array(np.arange(0., 360., 36., dtype=np.float32).reshape((2, 5)),
+ [dim for dim in dims]),
+ attrs={
+ 'Slope': np.array(1.), 'Intercept': np.array(0.),
+ 'FillValue': np.array(65535.),
+ 'units': 'NUL',
+ 'band_names': 'NUL',
+ 'valid_range': np.array([0., 360.]),
+ },
+ dims=('_RegLength', '_RegWidth'))
elif prefix == 'COEF':
if file_type == '500':
- data = xr.DataArray(
- da.from_array((np.arange(2.).reshape((1, 2)) + 1.) / np.array([1E4, 1E2]), [1, 2]),
- attrs={
- 'Slope': 1., 'Intercept': 0.,
- 'FillValue': 0,
- 'units': 'NUL',
- 'band_names': 'NUL',
- 'long_name': b'Calibration coefficient (SCALE and OFFSET)',
- 'valid_range': [-500, 500],
- },
- dims=('_num_channel', '_coefs'))
+ data = self._create_coeff_array(1)
elif file_type == '1000':
- data = xr.DataArray(
- da.from_array((np.arange(6.).reshape((3, 2)) + 1.) / np.array([1E4, 1E2]), [3, 2]),
- attrs={
- 'Slope': 1., 'Intercept': 0.,
- 'FillValue': 0,
- 'units': 'NUL',
- 'band_names': 'NUL',
- 'long_name': b'Calibration coefficient (SCALE and OFFSET)',
- 'valid_range': [-500, 500],
- },
- dims=('_num_channel', '_coefs'))
+ data = self._create_coeff_array(3)
elif file_type == '2000':
- data = xr.DataArray(
- da.from_array((np.arange(14.).reshape((7, 2)) + 1.) / np.array([1E4, 1E2]), [7, 2]),
- attrs={
- 'Slope': 1., 'Intercept': 0.,
- 'FillValue': 0,
- 'units': 'NUL',
- 'band_names': 'NUL',
- 'long_name': b'Calibration coefficient (SCALE and OFFSET)',
- 'valid_range': [-500, 500],
- },
- dims=('_num_channel', '_coefs'))
+ data = self._create_coeff_array(7)
elif file_type == '4000':
- data = xr.DataArray(
- da.from_array((np.arange(28.).reshape((14, 2)) + 1.)
- / np.array([1E4, 1E2]), [14, 2]),
- attrs={
- 'Slope': 1., 'Intercept': 0.,
- 'FillValue': 0,
- 'units': 'NUL',
- 'band_names': 'NUL',
- 'long_name': b'Calibration coefficient (SCALE and OFFSET)',
- 'valid_range': [-500, 500],
- },
- dims=('_num_channel', '_coefs'))
+ data = self._create_coeff_array(14)
return data
- def _get_500m_data(self, file_type):
+ def _create_coeff_array(self, nb_channels):
+ data = xr.DataArray(
+ da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) /
+ np.array([1E4, 1E2]), [nb_channels, 2]),
+ attrs={
+ 'Slope': 1., 'Intercept': 0.,
+ 'FillValue': 0,
+ 'units': 'NUL',
+ 'band_names': 'NUL',
+ 'long_name': b'Calibration coefficient (SCALE and OFFSET)',
+ 'valid_range': [-500, 500],
+ },
+ dims=('_num_channel', '_coefs'))
+ return data
+
+ def _create_channel_data(self, chs, cwls, file_type):
dim_0 = 2
dim_1 = 5
- chs = [2]
- cwls = [0.65]
data = {}
for index, _cwl in enumerate(cwls):
data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL',
[dim_0, dim_1], file_type)
+ data['Calibration/CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL',
+ [dim_0, dim_1], file_type)
data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM',
[dim_0, dim_1], file_type)
+ data['Data/NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM',
+ [dim_0, dim_1], file_type)
data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF',
[dim_0, dim_1], file_type)
+ data['Calibration/CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF',
+ [dim_0, dim_1], file_type)
+ return data
+
+ def _get_500m_data(self, file_type):
+ chs = [2]
+ cwls = [0.65]
+ data = self._create_channel_data(chs, cwls, file_type)
return data
def _get_1km_data(self, file_type):
- dim_0 = 2
- dim_1 = 5
chs = np.linspace(1, 3, 3)
cwls = [0.47, 0.65, 0.83]
- data = {}
- for index, _cwl in enumerate(cwls):
- data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL',
- [dim_0, dim_1], file_type)
- data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM',
- [dim_0, dim_1], file_type)
- data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF',
- [dim_0, dim_1], file_type)
+ data = self._create_channel_data(chs, cwls, file_type)
return data
def _get_2km_data(self, file_type):
- dim_0 = 2
- dim_1 = 5
chs = np.linspace(1, 7, 7)
cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72]
- data = {}
- for index, _cwl in enumerate(cwls):
- data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL',
- [dim_0, dim_1], file_type)
- data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM',
- [dim_0, dim_1], file_type)
- data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF',
- [dim_0, dim_1], file_type)
+ data = self._create_channel_data(chs, cwls, file_type)
return data
def _get_4km_data(self, file_type):
- dim_0 = 2
- dim_1 = 5
chs = np.linspace(1, 14, 14)
cwls = [0.47, 0.65, 0.83, 1.37, 1.61, 2.22, 3.72, 3.72, 6.25, 7.10, 8.50, 10.8, 12, 13.5]
- data = {}
- for index, _cwl in enumerate(cwls):
- data['CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL',
- [dim_0, dim_1], file_type)
- data['NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM',
- [dim_0, dim_1], file_type)
- data['CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF',
- [dim_0, dim_1], file_type)
+ data = self._create_channel_data(chs, cwls, file_type)
return data
+ def _get_geo_data(self, file_type):
+ dim_0 = 2
+ dim_1 = 5
+ data = {'NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO',
+ [dim_0, dim_1], file_type),
+ 'Navigation/NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO',
+ [dim_0, dim_1], file_type)}
+ return data
+
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
global_attrs = {
'/attr/NOMCenterLat': np.array(0.0),
'/attr/NOMCenterLon': np.array(104.7),
- '/attr/NOMSatHeight': np.array(3.5786E7),
+ '/attr/NOMSatHeight': np.array(42164140.0),
'/attr/dEA': np.array(6378.14),
'/attr/dObRecFlat': np.array(298.257223563),
'/attr/OBIType': 'REGC',
@@ -210,8 +213,12 @@ def get_test_content(self, filename, filename_info, filetype_info):
data = self._get_1km_data('1000')
elif self.filetype_info['file_type'] == 'agri_l1_2000m':
data = self._get_2km_data('2000')
+ global_attrs['/attr/Observing Beginning Time'] = '00:30:01'
+ global_attrs['/attr/Observing Ending Time'] = '00:34:07'
elif self.filetype_info['file_type'] == 'agri_l1_4000m':
data = self._get_4km_data('4000')
+ elif self.filetype_info['file_type'] == 'agri_l1_4000m_geo':
+ data = self._get_geo_data('4000')
test_content = {}
test_content.update(global_attrs)
@@ -220,313 +227,208 @@ def get_test_content(self, filename, filename_info, filetype_info):
return test_content
-class Test_HDF_AGRI_L1_cal(unittest.TestCase):
+def _create_filenames_from_resolutions(satname, *resolutions):
+ """Create filenames from the given resolutions."""
+ if 'GEO' in resolutions:
+ return [f"{satname}-_AGRI--_N_REGC_1047E_L1-_GEO-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF"]
+ pattern = (f"{satname}-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_"
+ "{resolution:04d}M_V0001.HDF")
+ return [pattern.format(resolution=resolution) for resolution in resolutions]
+
+
+class Test_HDF_AGRI_L1_cal:
"""Test VIRR L1B Reader."""
- yaml_file = "agri_l1.yaml"
+ yaml_file = "agri_fy4a_l1.yaml"
- def setUp(self):
+ def setup_method(self):
"""Wrap HDF5 file handler with our own fake handler."""
- from satpy.readers.agri_l1 import HDF_AGRI_L1
from satpy._config import config_search_paths
+        from satpy.readers.fy4_base import FY4Base
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
- self.p = mock.patch.object(HDF_AGRI_L1, '__bases__', (FakeHDF5FileHandler2,))
- self.fake_handler = self.p.start()
+        # Patching FY4Base's bases is enough: HDF_AGRI_L1 inherits from it.
+        self.p = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,))
+        self.fake_handler = self.p.start()
+        self.p.is_local = True
+ self.satname = 'FY4A'
+
+ self.expected = {
+ 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]),
+ 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]),
+ 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]),
+ 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]),
+ 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]),
+ 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]),
+ 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 8: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 9: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 10: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 11: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 12: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 13: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ 14: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]])
+ }
- def tearDown(self):
+ def teardown_method(self):
"""Stop wrapping the HDF5 file handler."""
self.p.stop()
- def test_fy4a_all_resolutions(self):
- """Test loading data when all resolutions are available."""
- from satpy.tests.utils import make_dsq
- from satpy.readers import load_reader
- from satpy.dataset.data_dict import get_key
- filenames = [
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF',
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF',
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF',
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF',
- ]
- reader = load_reader(self.reader_configs)
- files = reader.select_files_from_pathnames(filenames)
- self.assertEqual(4, len(files))
- reader.create_filehandlers(files)
- # Make sure we have some files
- self.assertTrue(reader.file_handlers)
+ def test_times_correct(self):
+ """Test that the reader handles the two possible time formats correctly."""
+ reader = self._create_reader_for_resolutions(1000)
+ np.testing.assert_almost_equal(reader.start_time.microsecond, 807000)
+ reader = self._create_reader_for_resolutions(2000)
+ np.testing.assert_almost_equal(reader.start_time.microsecond, 0)
- available_datasets = reader.available_dataset_ids
+ def test_fy4a_channels_are_loaded_with_right_resolution(self):
+ """Test all channels are loaded with the right resolution."""
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
- # 500m
- band_names = ['C' + '%02d' % ch for ch in np.linspace(2, 2, 1)]
- for band_name in band_names:
- ds_q = make_dsq(name=band_name, resolution=500)
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
- self.assertEqual(2, len(res))
+ available_datasets = reader.available_dataset_ids
- # 1km
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 3, 3)]
- for band_name in band_names:
- ds_q = make_dsq(name=band_name, resolution=1000)
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
- self.assertEqual(2, len(res))
+ for resolution_to_test in RESOLUTION_LIST:
+ self._check_keys_for_dsq(available_datasets, resolution_to_test)
- # 2km
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 7, 7)]
- for band_name in band_names:
- ds_q = make_dsq(name=band_name, resolution=2000)
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
- if band_name < 'C07':
- self.assertEqual(2, len(res))
- else:
- self.assertEqual(3, len(res))
+ def test_agri_all_bands_have_right_units(self):
+ """Test all bands have the right units."""
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 14, 14)]
+ band_names = ALL_BAND_NAMES
res = reader.load(band_names)
- self.assertEqual(14, len(res))
+ assert len(res) == 14
for band_name in band_names:
- self.assertEqual((2, 5), res[band_name].shape)
- if band_name < 'C07':
- self.assertEqual('reflectance', res[band_name].attrs['calibration'])
- else:
- self.assertEqual('brightness_temperature', res[band_name].attrs['calibration'])
- if band_name < 'C07':
- self.assertEqual('%', res[band_name].attrs['units'])
- else:
- self.assertEqual('K', res[band_name].attrs['units'])
+ assert res[band_name].shape == (2, 5)
+ self._check_units(band_name, res)
+
+ def test_agri_orbital_parameters_are_correct(self):
+ """Test orbital parameters are set correctly."""
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
+
+ band_names = ALL_BAND_NAMES
+ res = reader.load(band_names)
# check whether the data type of orbital_parameters is float
orbital_parameters = res[band_names[0]].attrs['orbital_parameters']
for attr in orbital_parameters:
- self.assertEqual(type(orbital_parameters[attr]), float)
- self.assertEqual(orbital_parameters['satellite_nominal_latitude'], 0.)
- self.assertEqual(orbital_parameters['satellite_nominal_longitude'], 104.7)
- self.assertEqual(orbital_parameters['satellite_nominal_altitude'], 3.5786E7)
-
- def test_fy4a_counts_calib(self):
- """Test loading data at counts calibration."""
- from satpy.tests.utils import make_dsq
- from satpy.readers import load_reader
- filenames = [
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF',
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF',
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF',
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF',
- ]
- reader = load_reader(self.reader_configs)
- files = reader.select_files_from_pathnames(filenames)
- self.assertEqual(4, len(files))
- reader.create_filehandlers(files)
- # Make sure we have some files
- self.assertTrue(reader.file_handlers)
-
- ds_ids = []
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 14, 14)]
- for band_name in band_names:
- ds_ids.append(make_dsq(name=band_name, calibration='counts'))
- res = reader.load(ds_ids)
- self.assertEqual(14, len(res))
-
- for band_name in band_names:
- self.assertEqual((2, 5), res[band_name].shape)
- self.assertEqual('counts', res[band_name].attrs['calibration'])
- self.assertEqual(res[band_name].dtype, np.uint16)
- self.assertEqual('1', res[band_name].attrs['units'])
+ assert isinstance(orbital_parameters[attr], float)
+ assert orbital_parameters['satellite_nominal_latitude'] == 0.
+ assert orbital_parameters['satellite_nominal_longitude'] == 104.7
+ assert orbital_parameters['satellite_nominal_altitude'] == 42164140.0
- def test_fy4a_4km_resolutions(self):
- """Test loading data when only 4km resolutions are available."""
- from satpy.tests.utils import make_dsq
- from satpy.readers import load_reader
+ @staticmethod
+ def _check_keys_for_dsq(available_datasets, resolution_to_test):
from satpy.dataset.data_dict import get_key
- filenames = [
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_4000M_V0001.HDF',
- ]
- reader = load_reader(self.reader_configs)
- files = reader.select_files_from_pathnames(filenames)
- self.assertEqual(1, len(files))
- reader.create_filehandlers(files)
- # Make sure we have some files
- self.assertTrue(reader.file_handlers)
+ from satpy.tests.utils import make_dsq
- # Verify that the resolution is only 4km
- available_datasets = reader.available_dataset_ids
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 14, 14)]
+ band_names = CHANNELS_BY_RESOLUTION[resolution_to_test]
for band_name in band_names:
- for resolution in [500, 1000, 2000]:
- ds_q = make_dsq(name=band_name, resolution=resolution)
- with pytest.raises(KeyError):
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
-
- ds_q = make_dsq(name=band_name, resolution=4000)
+ ds_q = make_dsq(name=band_name, resolution=resolution_to_test)
res = get_key(ds_q, available_datasets, num_results=0, best=False)
if band_name < 'C07':
- self.assertEqual(2, len(res))
- else:
- self.assertEqual(3, len(res))
-
- res = reader.load(band_names)
- self.assertEqual(14, len(res))
- expected = {
- 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]),
- 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]),
- 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]),
- 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]),
- 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]),
- 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]])
- }
- for i in range(7, 15):
- expected[i] = np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]])
-
- for index, band_name in enumerate(band_names):
- self.assertEqual((2, 5), res[band_name].shape)
- if band_name < 'C07':
- self.assertEqual('reflectance', res[band_name].attrs['calibration'])
+ assert len(res) == 2
else:
- self.assertEqual('brightness_temperature', res[band_name].attrs['calibration'])
- if band_name < 'C07':
- self.assertEqual('%', res[band_name].attrs['units'])
- else:
- self.assertEqual('K', res[band_name].attrs['units'])
- self.assertTrue(np.allclose(res[band_name].values, expected[index + 1], equal_nan=True))
+ assert len(res) == 3
- def test_fy4a_2km_resolutions(self):
- """Test loading data when only 2km resolutions are available."""
+ def test_agri_counts_calibration(self):
+ """Test loading data at counts calibration."""
from satpy.tests.utils import make_dsq
- from satpy.readers import load_reader
- from satpy.dataset.data_dict import get_key
- filenames = [
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_2000M_V0001.HDF',
- ]
- reader = load_reader(self.reader_configs)
- files = reader.select_files_from_pathnames(filenames)
- self.assertEqual(1, len(files))
- reader.create_filehandlers(files)
- # Make sure we have some files
- self.assertTrue(reader.file_handlers)
-
- # Verify that the resolution is only 2km
- available_datasets = reader.available_dataset_ids
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 7, 7)]
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
+ ds_ids = []
+ band_names = CHANNELS_BY_RESOLUTION[4000]
for band_name in band_names:
- for resolution in [500, 1000, 4000]:
- ds_q = make_dsq(name=band_name, resolution=resolution)
- with pytest.raises(KeyError):
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
+ ds_ids.append(make_dsq(name=band_name, calibration='counts'))
+ res = reader.load(ds_ids)
+ assert len(res) == 14
- ds_q = make_dsq(name=band_name, resolution=2000)
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
- if band_name < 'C07':
- self.assertEqual(2, len(res))
- else:
- self.assertEqual(3, len(res))
+ for band_name in band_names:
+ assert res[band_name].shape == (2, 5)
+ assert res[band_name].attrs['calibration'] == "counts"
+ assert res[band_name].dtype == np.uint16
+ assert res[band_name].attrs['units'] == "1"
+
+ @pytest.mark.parametrize("satname", ['FY4A', 'FY4B'])
+ def test_agri_geo(self, satname):
+ """Test loading data for angles."""
+ from satpy.tests.utils import make_dsq
+ self.satname = satname
+ reader = self._create_reader_for_resolutions('GEO')
+ band_name = 'solar_azimuth_angle'
+ ds_ids = [make_dsq(name=band_name)]
+ res = reader.load(ds_ids)
+ assert len(res) == 1
- res = reader.load(band_names)
- self.assertEqual(7, len(res))
- expected = {
- 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]),
- 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]),
- 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]),
- 4: np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]),
- 5: np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]),
- 6: np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]),
- 7: np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]])
- }
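+        # The fake GEO data is arange(0, 360, 36), hence min 0 and max 324.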
+ np.testing.assert_almost_equal(np.nanmin(res[band_name]), 0.)
+ np.testing.assert_almost_equal(np.nanmax(res[band_name]), 324.)
- for index, band_name in enumerate(band_names):
- self.assertEqual((2, 5), res[band_name].shape)
- if band_name < 'C07':
- self.assertEqual('reflectance', res[band_name].attrs['calibration'])
- else:
- self.assertEqual('brightness_temperature', res[band_name].attrs['calibration'])
- if band_name < 'C07':
- self.assertEqual('%', res[band_name].attrs['units'])
- else:
- self.assertEqual('K', res[band_name].attrs['units'])
- self.assertTrue(np.allclose(res[band_name].values, expected[index + 1], equal_nan=True))
+ assert res[band_name].shape == (2, 5)
+ assert res[band_name].dtype == np.float32
- def test_fy4a_1km_resolutions(self):
- """Test loading data when only 1km resolutions are available."""
- from satpy.tests.utils import make_dsq
+ def _create_reader_for_resolutions(self, *resolutions):
from satpy.readers import load_reader
- from satpy.dataset.data_dict import get_key
- filenames = [
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_1000M_V0001.HDF',
- ]
+ filenames = _create_filenames_from_resolutions(self.satname, *resolutions)
reader = load_reader(self.reader_configs)
files = reader.select_files_from_pathnames(filenames)
- self.assertEqual(1, len(files))
+ assert len(filenames) == len(files)
reader.create_filehandlers(files)
# Make sure we have some files
- self.assertTrue(reader.file_handlers)
+ assert reader.file_handlers
+ return reader
- # Verify that the resolution is only 1km
- available_datasets = reader.available_dataset_ids
- band_names = ['C' + '%02d' % ch for ch in np.linspace(1, 3, 3)]
-
- for band_name in band_names:
- for resolution in [500, 2000, 4000]:
- ds_q = make_dsq(name=band_name, resolution=resolution)
- with pytest.raises(KeyError):
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
-
- ds_q = make_dsq(name=band_name, resolution=1000)
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
- self.assertEqual(2, len(res))
+ @pytest.mark.parametrize("resolution_to_test", RESOLUTION_LIST)
+ @pytest.mark.parametrize("satname", ['FY4A', 'FY4B'])
+ def test_agri_for_one_resolution(self, resolution_to_test, satname):
+ """Test loading data when only one resolution is available."""
+ self.satname = satname
+ reader = self._create_reader_for_resolutions(resolution_to_test)
+ available_datasets = reader.available_dataset_ids
+ band_names = CHANNELS_BY_RESOLUTION[resolution_to_test]
+ self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test)
res = reader.load(band_names)
- self.assertEqual(3, len(res))
- expected = {
- 1: np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]),
- 2: np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]),
- 3: np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]])
- }
+ assert len(res) == len(band_names)
+ self._check_calibration_and_units(band_names, res)
+ for band_name in band_names:
+ np.testing.assert_allclose(res[band_name].attrs['area'].area_extent,
+ AREA_EXTENTS_BY_RESOLUTION[satname][resolution_to_test])
+ def _check_calibration_and_units(self, band_names, result):
for index, band_name in enumerate(band_names):
- self.assertEqual(1, res[band_name].attrs['sensor'].islower())
- self.assertEqual((2, 5), res[band_name].shape)
- self.assertEqual('reflectance', res[band_name].attrs['calibration'])
- self.assertEqual('%', res[band_name].attrs['units'])
- self.assertTrue(np.allclose(res[band_name].values, expected[index + 1], equal_nan=True))
-
- def test_fy4a_500m_resolutions(self):
- """Test loading data when only 500m resolutions are available."""
- from satpy.tests.utils import make_dsq
- from satpy.readers import load_reader
+ assert result[band_name].attrs['sensor'].islower()
+ assert result[band_name].shape == (2, 5)
+ np.testing.assert_allclose(result[band_name].values, self.expected[index + 1], equal_nan=True)
+ self._check_units(band_name, result)
+
+ @staticmethod
+ def _check_units(band_name, result):
+        if band_name < 'C07':
+            assert result[band_name].attrs['calibration'] == "reflectance"
+            assert result[band_name].attrs['units'] == "%"
+        else:
+            assert result[band_name].attrs['calibration'] == "brightness_temperature"
+            assert result[band_name].attrs['units'] == "K"
+
+ @staticmethod
+ def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test):
from satpy.dataset.data_dict import get_key
- filenames = [
- 'FY4A-_AGRI--_N_REGC_1047E_L1-_FDI-_MULT_NOM_20190603003000_20190603003416_0500M_V0001.HDF',
- ]
- reader = load_reader(self.reader_configs)
- files = reader.select_files_from_pathnames(filenames)
- self.assertEqual(1, len(files))
- reader.create_filehandlers(files)
- # Make sure we have some files
- self.assertTrue(reader.file_handlers)
-
- # Verify that the resolution is only 500m
- available_datasets = reader.available_dataset_ids
- band_names = ['C' + '%02d' % ch for ch in np.linspace(2, 2, 1)]
+ from satpy.tests.utils import make_dsq
+ other_resolutions = RESOLUTION_LIST.copy()
+ other_resolutions.remove(resolution_to_test)
for band_name in band_names:
- for resolution in [1000, 2000, 4000]:
+ for resolution in other_resolutions:
ds_q = make_dsq(name=band_name, resolution=resolution)
with pytest.raises(KeyError):
- res = get_key(ds_q, available_datasets, num_results=0, best=False)
+ _ = get_key(ds_q, available_datasets, num_results=0, best=False)
- ds_q = make_dsq(name=band_name, resolution=500)
+ ds_q = make_dsq(name=band_name, resolution=resolution_to_test)
res = get_key(ds_q, available_datasets, num_results=0, best=False)
- self.assertEqual(2, len(res))
-
- res = reader.load(band_names)
- self.assertEqual(1, len(res))
- expected = np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]])
-
- for band_name in band_names:
- self.assertEqual((2, 5), res[band_name].shape)
- self.assertEqual('reflectance', res[band_name].attrs['calibration'])
- self.assertEqual('%', res[band_name].attrs['units'])
- self.assertTrue(np.allclose(res[band_name].values, expected, equal_nan=True))
+ if band_name < 'C07':
+ assert len(res) == 2
+ else:
+ assert len(res) == 3
diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py
index b399989e77..1681906785 100644
--- a/satpy/tests/reader_tests/test_ahi_hrit.py
+++ b/satpy/tests/reader_tests/test_ahi_hrit.py
@@ -17,24 +17,30 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The hrit ahi reader tests package."""
-import numpy as np
-import dask.array as da
-from xarray import DataArray
import unittest
from unittest import mock
+import dask.array as da
+import numpy as np
+from xarray import DataArray
+
class TestHRITJMAFileHandler(unittest.TestCase):
"""Test the HRITJMAFileHandler."""
@mock.patch('satpy.readers.hrit_jma.HRITFileHandler.__init__')
- def _get_reader(self, mocked_init, mda, filename_info=None):
+ def _get_reader(self, mocked_init, mda, filename_info=None, filetype_info=None, reader_kwargs=None):
from satpy.readers.hrit_jma import HRITJMAFileHandler
if not filename_info:
filename_info = {}
+ if not filetype_info:
+ filetype_info = {}
+ if not reader_kwargs:
+ reader_kwargs = {}
HRITJMAFileHandler.filename = 'filename'
HRITJMAFileHandler.mda = mda
- return HRITJMAFileHandler('filename', filename_info, {})
+ HRITJMAFileHandler._start_time = filename_info.get('start_time')
+ return HRITJMAFileHandler('filename', filename_info, filetype_info, **reader_kwargs)
def _get_acq_time(self, nlines):
"""Get sample header entry for scanline acquisition times.
@@ -86,7 +92,7 @@ def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000,
def test_init(self):
"""Test creating the file handler."""
- from satpy.readers.hrit_jma import UNKNOWN_AREA, HIMAWARI8
+ from satpy.readers.hrit_jma import HIMAWARI8, UNKNOWN_AREA
# Test addition of extra metadata
mda = self._get_mda()
@@ -136,8 +142,7 @@ def test_init(self):
@mock.patch('satpy.readers.hrit_jma.HRITJMAFileHandler.__init__')
def test_get_platform(self, mocked_init):
"""Test platform identification."""
- from satpy.readers.hrit_jma import HRITJMAFileHandler
- from satpy.readers.hrit_jma import PLATFORMS, UNKNOWN_PLATFORM
+ from satpy.readers.hrit_jma import PLATFORMS, UNKNOWN_PLATFORM, HRITJMAFileHandler
mocked_init.return_value = None
reader = HRITJMAFileHandler()
@@ -153,8 +158,7 @@ def test_get_platform(self, mocked_init):
def test_get_area_def(self):
"""Test getting an AreaDefinition."""
- from satpy.readers.hrit_jma import (FULL_DISK, NORTH_HEMIS, SOUTH_HEMIS,
- AREA_NAMES)
+ from satpy.readers.hrit_jma import AREA_NAMES, FULL_DISK, NORTH_HEMIS, SOUTH_HEMIS
cases = [
# Non-segmented, full disk
@@ -280,9 +284,6 @@ def test_get_dataset(self, base_get_dataset):
self.assertEqual(res.attrs['units'], '%')
self.assertEqual(res.attrs['sensor'], 'ahi')
self.assertEqual(res.attrs['platform_name'], HIMAWARI8)
- self.assertEqual(res.attrs['satellite_longitude'], 140.7)
- self.assertEqual(res.attrs['satellite_latitude'], 0.)
- self.assertEqual(res.attrs['satellite_altitude'], 35785831.0)
self.assertDictEqual(res.attrs['orbital_parameters'], {'projection_longitude': 140.7,
'projection_latitude': 0.,
'projection_altitude': 35785831.0})
@@ -322,3 +323,26 @@ def test_get_acq_time(self):
np.testing.assert_allclose(reader.acq_time.astype(np.int64),
acq_time_exp.astype(np.int64),
atol=45000)
+
+ def test_start_time_from_filename(self):
+ """Test that by default the datetime in the filename is returned."""
+ import datetime as dt
+ start_time = dt.datetime(2022, 1, 20, 12, 10)
+ for platform in ['Himawari-8', 'MTSAT-2']:
+ mda = self._get_mda(platform=platform)
+ reader = self._get_reader(
+ mda=mda,
+ filename_info={'start_time': start_time})
+ assert reader._start_time == start_time
+
+    def test_start_time_from_acq_time(self):
+        """Test that the datetime from the metadata is returned when `use_acquisition_time_as_start_time=True`."""
+ import datetime as dt
+ start_time = dt.datetime(2022, 1, 20, 12, 10)
+ for platform in ['Himawari-8', 'MTSAT-2']:
+ mda = self._get_mda(platform=platform)
+ reader = self._get_reader(
+ mda=mda,
+ filename_info={'start_time': start_time},
+ reader_kwargs={'use_acquisition_time_as_start_time': True})
+ assert reader.start_time == reader.acq_time[0].astype(dt.datetime)
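
Reviewer note: the two new tests exercise the `use_acquisition_time_as_start_time` keyword added to the JMA HRIT handler: by default `start_time` comes from the filename, and with the flag set it falls back to the first scanline acquisition time. A rough sketch of the selection logic being tested; the class and attribute names here are illustrative, not the reader's actual implementation:

    import datetime as dt


    class StartTimeSelectionSketch:
        """Illustrative stand-in for the start_time behaviour the tests cover."""

        def __init__(self, filename_start, acq_times,
                     use_acquisition_time_as_start_time=False):
            self._start_time = filename_start  # parsed from the filename
            self.acq_time = acq_times          # per-scanline acquisition times
            self._use_acq_time = use_acquisition_time_as_start_time

        @property
        def start_time(self):
            # Prefer the first scanline acquisition time only when requested.
            if self._use_acq_time:
                return self.acq_time[0]
            return self._start_time


    fh = StartTimeSelectionSketch(dt.datetime(2022, 1, 20, 12, 10),
                                  [dt.datetime(2022, 1, 20, 12, 10, 30)])
    assert fh.start_time == dt.datetime(2022, 1, 20, 12, 10)
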
diff --git a/satpy/tests/reader_tests/test_ahi_hsd.py b/satpy/tests/reader_tests/test_ahi_hsd.py
index c3e91bfbe5..f916db5002 100644
--- a/satpy/tests/reader_tests/test_ahi_hsd.py
+++ b/satpy/tests/reader_tests/test_ahi_hsd.py
@@ -16,15 +16,86 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The ahi_hsd reader tests package."""
+from __future__ import annotations
+import contextlib
import unittest
-from unittest import mock
import warnings
-import numpy as np
-import dask.array as da
from datetime import datetime
+from typing import Any, Dict
+from unittest import mock
+
+import dask.array as da
+import numpy as np
+import pytest
+
from satpy.readers.ahi_hsd import AHIHSDFileHandler
from satpy.readers.utils import get_geostationary_mask
+from satpy.tests.utils import make_dataid
+
+InfoDict = Dict[str, Any]
+
+FAKE_BASIC_INFO: InfoDict = {
+ 'blocklength': 0,
+ 'satellite': 'Himawari-8',
+ 'observation_area': 'FLDK',
+ 'observation_start_time': 58413.12523839,
+ 'observation_end_time': 58413.12562439,
+ 'observation_timeline': '0300',
+}
+FAKE_DATA_INFO: InfoDict = {
+ 'blocklength': 50,
+ 'compression_flag_for_data': 0,
+ 'hblock_number': 2,
+ 'number_of_bits_per_pixel': 16,
+ 'number_of_columns': 11000,
+ 'number_of_lines': 1100,
+ 'spare': '',
+}
+FAKE_PROJ_INFO: InfoDict = {
+ 'CFAC': 40932549,
+ 'COFF': 5500.5,
+ 'LFAC': 40932549,
+ 'LOFF': 5500.5,
+ 'blocklength': 127,
+ 'coeff_for_sd': 1737122264.0,
+ 'distance_from_earth_center': 42164.0,
+ 'earth_equatorial_radius': 6378.137,
+ 'earth_polar_radius': 6356.7523,
+ 'hblock_number': 3,
+ 'req2_rpol2': 1.006739501,
+ 'req2_rpol2_req2': 0.0066943844,
+ 'resampling_size': 4,
+ 'resampling_types': 0,
+ 'rpol2_req2': 0.993305616,
+ 'spare': '',
+ 'sub_lon': 140.7,
+}
+FAKE_NAV_INFO: InfoDict = {
+ 'SSP_longitude': 140.65699999999998,
+ 'SSP_latitude': 0.0042985719753897015,
+ 'distance_earth_center_to_satellite': 42165.04,
+ 'nadir_longitude': 140.25253875463318,
+ 'nadir_latitude': 0.01674775121155575,
+}
+FAKE_CAL_INFO: InfoDict = {'blocklength': 0, 'band_number': [4]}
+FAKE_IRVISCAL_INFO: InfoDict = {}
+FAKE_INTERCAL_INFO: InfoDict = {'blocklength': 0}
+FAKE_SEGMENT_INFO: InfoDict = {'blocklength': 0}
+FAKE_NAVCORR_INFO: InfoDict = {'blocklength': 0, 'numof_correction_info_data': [1]}
+FAKE_NAVCORR_SUBINFO: InfoDict = {}
+FAKE_OBS_TIME_INFO: InfoDict = {'blocklength': 0, 'number_of_observation_times': [1]}
+FAKE_OBS_LINETIME_INFO: InfoDict = {}
+FAKE_ERROR_INFO: InfoDict = {'blocklength': 0, 'number_of_error_info_data': [1]}
+FAKE_ERROR_LINE_INFO: InfoDict = {}
+FAKE_SPARE_INFO: InfoDict = {'blocklength': 0}
+
+
+def _new_unzip(fname, prefix=''):
+ """Fake unzipping."""
+ if fname[-3:] == 'bz2':
+ return prefix + fname[:-4]
+ return fname
class TestAHIHSDNavigation(unittest.TestCase):
@@ -129,108 +200,43 @@ def test_segment(self, fromfile, np2str):
5500000.035542117, -2200000.0142168473))
-class TestAHIHSDFileHandler(unittest.TestCase):
- """Test case for the file reading."""
-
- def new_unzip(fname):
- """Fake unzipping."""
- if fname[-3:] == 'bz2':
- return fname[:-4]
- return fname
-
- @staticmethod
- def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None):
- if filename_info is None:
- filename_info = {'segment': 8, 'total_segments': 10}
- if filetype_info is None:
- filetype_info = {'file_type': 'hsd_b01'}
- fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info)
-
- # Check that the filename is altered for bz2 format files
- assert in_fname != fh.filename
-
- fh.proj_info = {
- 'CFAC': 40932549,
- 'COFF': 5500.5,
- 'LFAC': 40932549,
- 'LOFF': 5500.5,
- 'blocklength': 127,
- 'coeff_for_sd': 1737122264.0,
- 'distance_from_earth_center': 42164.0,
- 'earth_equatorial_radius': 6378.137,
- 'earth_polar_radius': 6356.7523,
- 'hblock_number': 3,
- 'req2_rpol2': 1.006739501,
- 'req2_rpol2_req2': 0.0066943844,
- 'resampling_size': 4,
- 'resampling_types': 0,
- 'rpol2_req2': 0.993305616,
- 'spare': '',
- 'sub_lon': 140.7
- }
- fh.nav_info = {
- 'SSP_longitude': 140.66,
- 'SSP_latitude': 0.03,
- 'distance_earth_center_to_satellite': 42165.04,
- 'nadir_longitude': 140.67,
- 'nadir_latitude': 0.04
- }
- fh.data_info = {
- 'blocklength': 50,
- 'compression_flag_for_data': 0,
- 'hblock_number': 2,
- 'number_of_bits_per_pixel': 16,
- 'number_of_columns': 11000,
- 'number_of_lines': 1100,
- 'spare': ''
- }
- fh.basic_info = {
- 'observation_area': np.array(['FLDK']),
- 'observation_start_time': np.array([58413.12523839]),
- 'observation_end_time': np.array([58413.12562439]),
- 'observation_timeline': np.array([300]),
- }
- fh.observation_area = fh.basic_info['observation_area']
- return fh
-
- @mock.patch('satpy.readers.ahi_hsd.np2str')
- @mock.patch('satpy.readers.ahi_hsd.np.fromfile')
- @mock.patch('satpy.readers.ahi_hsd.unzip_file',
- mock.MagicMock(side_effect=new_unzip))
- def setUp(self, fromfile, np2str):
- """Create a test file handler."""
- np2str.side_effect = lambda x: x
- m = mock.mock_open()
- with mock.patch('satpy.readers.ahi_hsd.open', m, create=True):
- # Check if file handler raises exception for invalid calibration mode
- with self.assertRaises(ValueError):
- AHIHSDFileHandler('somefile',
- {'segment': 8, 'total_segments': 10},
- filetype_info={'file_type': 'hsd_b01'},
- calib_mode='BAD_MODE')
- in_fname = 'test_file.bz2'
- self.fh = self._create_fake_file_handler(in_fname)
+class TestAHIHSDFileHandler:
+ """Tests for the AHI HSD file handler."""
+
+ def test_bad_calibration(self):
+ """Test that a bad calibration mode causes an exception."""
+ with pytest.raises(ValueError):
+ with _fake_hsd_handler(fh_kwargs={"calib_mode": "BAD_MODE"}):
+ pass
+
+ @pytest.mark.parametrize(
+ ("round_actual_position", "expected_result"),
+ [
+ (False, (140.65699999999998, 0.0042985719753897015, 35786903.00011936)),
+ (True, (140.657, 0.0, 35786850.0))
+ ]
+ )
+ def test_actual_satellite_position(self, round_actual_position, expected_result):
+ """Test that rounding of the actual satellite position can be controlled."""
+ with _fake_hsd_handler(fh_kwargs={"round_actual_position": round_actual_position}) as fh:
+ ds_id = make_dataid(name="B01")
+ ds_info = {
+ "units": "%",
+ "standard_name": "some_name",
+ "wavelength": (0.1, 0.2, 0.3),
+ }
+ metadata = fh._get_metadata(ds_id, ds_info)
+ orb_params = metadata["orbital_parameters"]
+ assert orb_params["satellite_actual_longitude"] == expected_result[0]
+ assert orb_params["satellite_actual_latitude"] == expected_result[1]
+ assert orb_params["satellite_actual_altitude"] == expected_result[2]
- def test_time_properties(self):
- """Test start/end/scheduled time properties."""
- self.assertEqual(self.fh.start_time, datetime(2018, 10, 22, 3, 0, 20, 596896))
- self.assertEqual(self.fh.end_time, datetime(2018, 10, 22, 3, 0, 53, 947296))
- self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0))
+ @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._check_fpos')
+ def test_read_header(self, *mocks):
+ """Test header reading."""
+ with _fake_hsd_handler() as fh:
+ fh._read_header(mock.MagicMock())
- def test_scanning_frequencies(self):
- """Test scanning frequencies."""
- self.fh.observation_area = 'JP04'
- self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 7, 30, 0))
- self.fh.observation_area = 'R304'
- self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 7, 30, 0))
- self.fh.observation_area = 'R420'
- self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 9, 30, 0))
- self.fh.observation_area = 'R520'
- self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 9, 30, 0))
- self.fh.observation_area = 'FLDK'
- self.assertEqual(self.fh.scheduled_time, datetime(2018, 10, 22, 3, 0, 0, 0))
-
- @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header')
@mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data')
@mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid')
@mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate')
@@ -238,36 +244,48 @@ def test_read_band(self, calibrate, *mocks):
"""Test masking of space pixels."""
nrows = 25
ncols = 100
- self.fh.data_info['number_of_columns'] = ncols
- self.fh.data_info['number_of_lines'] = nrows
calibrate.return_value = np.ones((nrows, ncols))
- m = mock.mock_open()
- with mock.patch('satpy.readers.ahi_hsd.open', m, create=True):
- im = self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock())
+ with _fake_hsd_handler() as fh:
+ fh.data_info['number_of_columns'] = ncols
+ fh.data_info['number_of_lines'] = nrows
+ im = fh.read_band(mock.MagicMock(), mock.MagicMock())
# Note: Within the earth's shape get_geostationary_mask() is True but the numpy.ma mask
# is False
mask = im.to_masked_array().mask
- ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute())
- self.assertTrue(np.all(mask == ref_mask))
+ ref_mask = np.logical_not(get_geostationary_mask(fh.area).compute())
+ np.testing.assert_equal(mask, ref_mask)
# Test attributes
orb_params_exp = {'projection_longitude': 140.7,
'projection_latitude': 0.,
'projection_altitude': 35785863.0,
- 'satellite_actual_longitude': 140.66,
- 'satellite_actual_latitude': 0.03,
- 'nadir_longitude': 140.67,
- 'nadir_latitude': 0.04}
- self.assertTrue(set(orb_params_exp.items()).issubset(set(im.attrs['orbital_parameters'].items())))
- self.assertTrue(np.isclose(im.attrs['orbital_parameters']['satellite_actual_altitude'], 35786903.00581372))
+ 'satellite_actual_longitude': 140.657,
+ 'satellite_actual_latitude': 0.0,
+ 'satellite_actual_altitude': 35786850,
+ 'nadir_longitude': 140.252539,
+ 'nadir_latitude': 0.01674775}
+ actual_obs_params = im.attrs['orbital_parameters']
+ for key, value in orb_params_exp.items():
+ assert key in actual_obs_params
+ np.testing.assert_allclose(value, actual_obs_params[key])
+
+ time_params_exp = {
+ 'nominal_start_time': datetime(2018, 10, 22, 3, 0, 0, 0),
+ 'nominal_end_time': datetime(2018, 10, 22, 3, 0, 0, 0),
+ 'observation_start_time': datetime(2018, 10, 22, 3, 0, 20, 596896),
+ 'observation_end_time': datetime(2018, 10, 22, 3, 0, 53, 947296),
+ }
+ actual_time_params = im.attrs['time_parameters']
+ for key, value in time_params_exp.items():
+ assert key in actual_time_params
+ assert value == actual_time_params[key]
# Test if masking space pixels disables with appropriate flag
- self.fh.mask_space = False
+ fh.mask_space = False
with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_space') as mask_space:
- self.fh.read_band(info=mock.MagicMock(), key=mock.MagicMock())
+ fh.read_band(mock.MagicMock(), mock.MagicMock())
mask_space.assert_not_called()
- @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_header')
@mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._read_data')
@mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._mask_invalid')
@mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler.calibrate')
@@ -277,62 +295,86 @@ def test_scene_loading(self, calibrate, *mocks):
nrows = 25
ncols = 100
calibrate.return_value = np.ones((nrows, ncols))
- m = mock.mock_open()
- with mock.patch('satpy.readers.ahi_hsd.open', m, create=True), \
- mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler') as fh_cls:
- fh_cls.return_value = self.fh
- self.fh.filename_info['total_segments'] = 1
- self.fh.filename_info['segment'] = 1
- self.fh.data_info['number_of_columns'] = ncols
- self.fh.data_info['number_of_lines'] = nrows
- scn = Scene(reader='ahi_hsd', filenames=['HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT'])
- scn.load(['B07'])
- im = scn['B07']
-
- # Make sure space masking worked
- mask = im.to_masked_array().mask
- ref_mask = np.logical_not(get_geostationary_mask(self.fh.area).compute())
- self.assertTrue(np.all(mask == ref_mask))
+ with _fake_hsd_handler() as fh:
+ with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler') as fh_cls:
+ fh_cls.return_value = fh
+ fh.filename_info['total_segments'] = 1
+ fh.filename_info['segment'] = 1
+ fh.data_info['number_of_columns'] = ncols
+ fh.data_info['number_of_lines'] = nrows
+ scn = Scene(reader='ahi_hsd', filenames=['HS_H08_20210225_0700_B07_FLDK_R20_S0110.DAT'])
+ scn.load(['B07'])
+ im = scn['B07']
+
+ # Make sure space masking worked
+ mask = im.to_masked_array().mask
+ ref_mask = np.logical_not(get_geostationary_mask(fh.area).compute())
+ np.testing.assert_equal(mask, ref_mask)
+
+ def test_time_properties(self):
+ """Test start/end/scheduled time properties."""
+ with _fake_hsd_handler() as fh:
+ assert fh.start_time == datetime(2018, 10, 22, 3, 0)
+ assert fh.end_time == datetime(2018, 10, 22, 3, 0)
+ assert fh.observation_start_time == datetime(2018, 10, 22, 3, 0, 20, 596896)
+ assert fh.observation_end_time == datetime(2018, 10, 22, 3, 0, 53, 947296)
+ assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0)
+ assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0)
+
+ def test_scanning_frequencies(self):
+ """Test scanning frequencies."""
+ with _fake_hsd_handler() as fh:
+ fh.observation_area = 'JP04'
+ assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0)
+ assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0)
+ fh.observation_area = 'R304'
+ assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 7, 30, 0)
+ assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 7, 30, 0)
+ fh.observation_area = 'R420'
+ assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0)
+ assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0)
+ fh.observation_area = 'R520'
+ assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 9, 30, 0)
+ assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 9, 30, 0)
+ fh.observation_area = 'FLDK'
+ assert fh.nominal_start_time == datetime(2018, 10, 22, 3, 0, 0, 0)
+ assert fh.nominal_end_time == datetime(2018, 10, 22, 3, 0, 0, 0)
def test_blocklen_error(self, *mocks):
"""Test erraneous blocklength."""
open_name = '%s.open' % __name__
fpos = 50
- with mock.patch(open_name, create=True) as mock_open:
- with mock_open(mock.MagicMock(), 'r') as fp_:
- # Expected and actual blocklength match
- fp_.tell.return_value = 50
- with warnings.catch_warnings(record=True) as w:
- self.fh._check_fpos(fp_, fpos, 0, 'header 1')
- self.assertTrue(len(w) == 0)
-
- # Expected and actual blocklength do not match
- fp_.tell.return_value = 100
- with warnings.catch_warnings(record=True) as w:
- self.fh._check_fpos(fp_, fpos, 0, 'header 1')
- self.assertTrue(len(w) > 0)
-
- @mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._check_fpos')
- def test_read_header(self, *mocks):
- """Test header reading."""
- nhdr = [
- {'blocklength': 0},
- {'blocklength': 0},
- {'blocklength': 0},
- {'blocklength': 0},
- {'blocklength': 0, 'band_number': [4]},
- {'blocklength': 0},
- {'blocklength': 0},
- {'blocklength': 0},
- {'blocklength': 0, 'numof_correction_info_data': [1]},
- {'blocklength': 0},
- {'blocklength': 0, 'number_of_observation_times': [1]},
- {'blocklength': 0},
- {'blocklength': 0, 'number_of_error_info_data': [1]},
- {'blocklength': 0},
- {'blocklength': 0}]
- with mock.patch('numpy.fromfile', side_effect=nhdr):
- self.fh._read_header(mock.MagicMock())
+ with _fake_hsd_handler() as fh, \
+ mock.patch(open_name, create=True) as mock_open, \
+ mock_open(mock.MagicMock(), 'r') as fp_:
+ # Expected and actual blocklength match
+ fp_.tell.return_value = 50
+ with warnings.catch_warnings(record=True) as w:
+ fh._check_fpos(fp_, fpos, 0, 'header 1')
+ assert len(w) == 0
+
+ # Expected and actual blocklength do not match
+ fp_.tell.return_value = 100
+ with warnings.catch_warnings(record=True) as w:
+ fh._check_fpos(fp_, fpos, 0, 'header 1')
+ assert len(w) > 0
+
+ def test_is_valid_time(self):
+ """Test that valid times are correctly identified."""
+ assert AHIHSDFileHandler._is_valid_timeline(FAKE_BASIC_INFO['observation_timeline'])
+ assert not AHIHSDFileHandler._is_valid_timeline('65526')
+
+ def test_time_rounding(self):
+ """Test rounding of the nominal time."""
+ mocker = mock.MagicMock()
+ in_date = datetime(2020, 1, 1, 12, 0, 0)
+
+ with mock.patch('satpy.readers.ahi_hsd.AHIHSDFileHandler._is_valid_timeline', mocker):
+ with _fake_hsd_handler() as fh:
+ mocker.return_value = True
+ assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 3, 0, 0)
+ mocker.return_value = False
+ assert fh._modify_observation_time_for_nominal(in_date) == datetime(2020, 1, 1, 12, 0, 0)
class TestAHICalibration(unittest.TestCase):
@@ -379,7 +421,7 @@ def test_default_calibrate(self, *mocks):
# Radiance
rad_exp = np.array([[15.2, 11.5],
- [7.8, 0]])
+ [7.8, -3.3]])
rad = self.fh.calibrate(data=self.counts,
calibration='radiance')
self.assertTrue(np.allclose(rad, rad_exp))
@@ -403,7 +445,7 @@ def test_updated_calibrate(self):
# Standard operation
self.fh.calib_mode = 'UPDATE'
rad_exp = np.array([[30.4, 23.0],
- [15.6, 0.]])
+ [15.6, -6.6]])
rad = self.fh.calibrate(data=self.counts, calibration='radiance')
self.assertTrue(np.allclose(rad, rad_exp))
@@ -425,7 +467,7 @@ def test_updated_calibrate(self):
}
rad = self.fh.calibrate(data=self.counts, calibration='radiance')
rad_exp = np.array([[15.2, 11.5],
- [7.8, 0]])
+ [7.8, -3.3]])
self.assertTrue(np.allclose(rad, rad_exp))
def test_user_calibration(self):
@@ -436,7 +478,7 @@ def test_user_calibration(self):
self.fh.band_name = 'B13'
rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute()
rad_exp = np.array([[16.10526316, 12.21052632],
- [8.31578947, 0.10526316]])
+ [8.31578947, -3.36842105]])
self.assertTrue(np.allclose(rad, rad_exp))
# This is for DN calibration
@@ -445,7 +487,78 @@ def test_user_calibration(self):
'type': 'DN'}
self.fh.band_name = 'B13'
rad = self.fh.calibrate(data=self.counts, calibration='radiance').compute()
- print(rad)
rad_exp = np.array([[15.2, 12.],
- [8.8, 0.]])
+ [8.8, -0.8]])
self.assertTrue(np.allclose(rad, rad_exp))
+
+
+@contextlib.contextmanager
+def _fake_hsd_handler(fh_kwargs=None):
+ """Create a test file handler."""
+ m = mock.mock_open()
+ with mock.patch('satpy.readers.ahi_hsd.np.fromfile', _custom_fromfile), \
+ mock.patch('satpy.readers.ahi_hsd.unzip_file', mock.MagicMock(side_effect=_new_unzip)), \
+ mock.patch('satpy.readers.ahi_hsd.open', m, create=True):
+ in_fname = 'test_file.bz2'
+ fh = _create_fake_file_handler(in_fname, fh_kwargs=fh_kwargs)
+ yield fh
+
+
+def _custom_fromfile(*args, **kwargs):
+ from satpy.readers.ahi_hsd import (
+ _BASIC_INFO_TYPE,
+ _CAL_INFO_TYPE,
+ _DATA_INFO_TYPE,
+ _ERROR_INFO_TYPE,
+ _ERROR_LINE_INFO_TYPE,
+ _INTER_CALIBRATION_INFO_TYPE,
+ _IRCAL_INFO_TYPE,
+ _NAV_INFO_TYPE,
+ _NAVIGATION_CORRECTION_INFO_TYPE,
+ _NAVIGATION_CORRECTION_SUBINFO_TYPE,
+ _OBSERVATION_LINE_TIME_INFO_TYPE,
+ _OBSERVATION_TIME_INFO_TYPE,
+ _PROJ_INFO_TYPE,
+ _SEGMENT_INFO_TYPE,
+ _SPARE_TYPE,
+ _VISCAL_INFO_TYPE,
+ )
+ dtype = kwargs.get("dtype")
+ fake_info_map = {
+ _BASIC_INFO_TYPE: FAKE_BASIC_INFO,
+ _DATA_INFO_TYPE: FAKE_DATA_INFO,
+ _NAV_INFO_TYPE: FAKE_NAV_INFO,
+ _PROJ_INFO_TYPE: FAKE_PROJ_INFO,
+ _CAL_INFO_TYPE: FAKE_CAL_INFO,
+ _VISCAL_INFO_TYPE: FAKE_IRVISCAL_INFO,
+ _IRCAL_INFO_TYPE: FAKE_IRVISCAL_INFO,
+ _INTER_CALIBRATION_INFO_TYPE: FAKE_INTERCAL_INFO,
+ _SEGMENT_INFO_TYPE: FAKE_SEGMENT_INFO,
+ _NAVIGATION_CORRECTION_INFO_TYPE: FAKE_NAVCORR_INFO,
+ _NAVIGATION_CORRECTION_SUBINFO_TYPE: FAKE_NAVCORR_SUBINFO,
+ _OBSERVATION_TIME_INFO_TYPE: FAKE_OBS_TIME_INFO,
+ _OBSERVATION_LINE_TIME_INFO_TYPE: FAKE_OBS_LINETIME_INFO,
+ _ERROR_INFO_TYPE: FAKE_ERROR_INFO,
+ _ERROR_LINE_INFO_TYPE: FAKE_ERROR_LINE_INFO,
+ _SPARE_TYPE: FAKE_SPARE_INFO,
+ }
+ info_dict = fake_info_map[dtype]
+ fake_arr = np.zeros((1,), dtype=dtype)
+ for key, value in info_dict.items():
+ fake_arr[key] = value
+ return fake_arr
+
+
+def _create_fake_file_handler(in_fname, filename_info=None, filetype_info=None, fh_kwargs=None):
+ if filename_info is None:
+ filename_info = {'segment': 8, 'total_segments': 10}
+ if filetype_info is None:
+ filetype_info = {'file_type': 'hsd_b01'}
+ if fh_kwargs is None:
+ fh_kwargs = {}
+ fh = AHIHSDFileHandler(in_fname, filename_info, filetype_info, **fh_kwargs)
+
+    # Check that the filename is altered and a 2-digit segment prefix is added for bz2 files
+ assert in_fname != fh.filename
+ assert str(filename_info['segment']).zfill(2) == fh.filename[0:2]
+ return fh
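
Reviewer note: replacing the heavyweight `setUp` with the `_fake_hsd_handler` context manager is the core refactor here: `np.fromfile` is swapped for `_custom_fromfile`, which keys the fake header dicts on the structured dtype being requested and materialises each one as a one-element record array. A stripped-down sketch of that dtype-keyed pattern (names here are illustrative, not the module's):

    import numpy as np

    # Map each structured dtype to the fake field values it should yield.
    _FAKE_RECORDS = {
        np.dtype([("blocklength", "<i4"), ("satellite", "S16")]):
            {"blocklength": 0, "satellite": b"Himawari-8"},
    }


    def fake_fromfile(*args, **kwargs):
        """Stand-in for np.fromfile: build a record from the registered fakes."""
        dtype = np.dtype(kwargs["dtype"])
        arr = np.zeros((1,), dtype=dtype)
        for field, value in _FAKE_RECORDS[dtype].items():
            arr[field] = value
        return arr


    rec = fake_fromfile(dtype=[("blocklength", "<i4"), ("satellite", "S16")])
    assert rec["satellite"][0] == b"Himawari-8"
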
diff --git a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py
index ff5f89a038..e4ef6ec72f 100644
--- a/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py
+++ b/satpy/tests/reader_tests/test_ahi_l1b_gridded_bin.py
@@ -17,15 +17,17 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The ahi_l1b_gridded_bin reader tests package."""
+import os
+import shutil
+import tempfile
import unittest
from unittest import mock
-import numpy as np
+
import dask.array as da
+import numpy as np
from pyresample.geometry import AreaDefinition
-from satpy.readers.ahi_l1b_gridded_bin import AHIGriddedFileHandler, AHI_LUT_NAMES
-import os
-import shutil
-import tempfile
+
+from satpy.readers.ahi_l1b_gridded_bin import AHI_LUT_NAMES, AHIGriddedFileHandler
class TestAHIGriddedArea(unittest.TestCase):
@@ -155,8 +157,6 @@ def new_unzip(fname):
"""Fake unzipping."""
if fname[-3:] == 'bz2':
return fname[:-4]
-        else:
-            return fname
+        return fname
@mock.patch('satpy.readers.ahi_l1b_gridded_bin.unzip_file',
mock.MagicMock(side_effect=new_unzip))
@@ -209,22 +209,29 @@ def test_get_dataset(self, mocked_read):
self.assertEqual(res.attrs['name'], self.key['name'])
self.assertEqual(res.attrs['wavelength'], self.info['wavelength'])
+ @mock.patch('os.path.exists', return_value=True)
+ @mock.patch('os.remove')
+    def test_destructor(self, remove_patch, exists_patch):
+        """Check that the file handler deletes files if needed."""
+ del self.fh
+ remove_patch.assert_called()
+
class TestAHIGriddedLUTs(unittest.TestCase):
"""Test case for the downloading and preparing LUTs."""
def mocked_ftp_dl(fname):
"""Fake download of LUT tar file by creating a local tar."""
- import tempfile
- import tarfile
import os
+ import tarfile
+ import tempfile
with tarfile.open(fname, "w:gz") as tar_handle:
for namer in AHI_LUT_NAMES:
tmpf = os.path.join(tempfile.tempdir, namer)
with open(tmpf, 'w') as tmp_fid:
tmp_fid.write("TEST\n")
- tar_handle.add(tmpf, arcname='count2tbb/'+namer)
+ tar_handle.add(tmpf, arcname='count2tbb_v102/'+namer)
os.remove(tmpf)
def setUp(self):
@@ -262,14 +269,16 @@ def test_get_luts(self):
tempdir = tempfile.gettempdir()
print(self.fh.lut_dir)
self.fh._get_luts()
- self.assertFalse(os.path.exists(os.path.join(tempdir, 'count2tbb/')))
+ self.assertFalse(os.path.exists(os.path.join(tempdir, 'count2tbb_v102/')))
for lut_name in AHI_LUT_NAMES:
self.assertTrue(os.path.isfile(os.path.join(self.fh.lut_dir, lut_name)))
- @mock.patch('ftplib.FTP')
- def test_download_luts(self, mock_ftp):
+ @mock.patch('urllib.request.urlopen')
+ @mock.patch('shutil.copyfileobj')
+    def test_download_luts(self, mock_shutil, mock_dl):
+        """Test that the HTTP library is used for downloading LUTs."""
m = mock.mock_open()
with mock.patch('satpy.readers.ahi_l1b_gridded_bin.open', m, create=True):
self.fh._download_luts('/test_file')
- mock_ftp.assert_called()
+ mock_dl.assert_called()
+ mock_shutil.assert_called()
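
Reviewer note: this test now mirrors the reader's move from `ftplib` to plain HTTP. The two patched calls (`urllib.request.urlopen` and `shutil.copyfileobj`) suggest a streaming download along these lines; a sketch only, with a hypothetical URL, since the reader's real helper may differ:

    import shutil
    import urllib.request


    def download_luts(url: str, local_file: str) -> None:
        """Stream a remote LUT archive to disk without holding it in memory."""
        with urllib.request.urlopen(url) as response, open(local_file, "wb") as out:
            shutil.copyfileobj(response, out)


    # Hypothetical usage; the actual LUT location is defined by the reader:
    # download_luts("https://example.invalid/count2tbb_v102.tgz", "/tmp/luts.tgz")
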
diff --git a/satpy/tests/reader_tests/test_ami_l1b.py b/satpy/tests/reader_tests/test_ami_l1b.py
index efe33e5165..50f6f2af03 100644
--- a/satpy/tests/reader_tests/test_ami_l1b.py
+++ b/satpy/tests/reader_tests/test_ami_l1b.py
@@ -17,13 +17,13 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The ami_l1b reader tests package."""
-import numpy as np
-import xarray as xr
-import dask.array as da
-
import unittest
from unittest import mock
+import dask.array as da
+import numpy as np
+import xarray as xr
+
class FakeDataset(object):
"""Mimic xarray Dataset object."""
diff --git a/satpy/tests/reader_tests/test_amsr2_l1b.py b/satpy/tests/reader_tests/test_amsr2_l1b.py
index 6e61e4a155..f3e9de538f 100644
--- a/satpy/tests/reader_tests/test_amsr2_l1b.py
+++ b/satpy/tests/reader_tests/test_amsr2_l1b.py
@@ -18,13 +18,14 @@
"""Module for testing the satpy.readers.amsr2_l1b module."""
import os
+import unittest
+from unittest import mock
+
import numpy as np
+
from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
-import unittest
-from unittest import mock
-
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
@@ -161,6 +162,8 @@ def test_load_basic(self):
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2))
self.assertTupleEqual(d.attrs['area'].lats.shape,
(DEFAULT_FILE_SHAPE[0], DEFAULT_FILE_SHAPE[1] // 2))
+ assert d.attrs['sensor'] == 'amsr2'
+ assert d.attrs['platform_name'] == 'GCOM-W1'
def test_load_89ghz(self):
"""Test loading of 89GHz channels."""
diff --git a/satpy/tests/reader_tests/test_amsr2_l2.py b/satpy/tests/reader_tests/test_amsr2_l2.py
index 49a39e229d..711754c989 100644
--- a/satpy/tests/reader_tests/test_amsr2_l2.py
+++ b/satpy/tests/reader_tests/test_amsr2_l2.py
@@ -19,11 +19,12 @@
import os
import unittest
-import numpy as np
from unittest import mock
-from satpy.tests.utils import convert_file_content_to_data_array
-from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
+import numpy as np
+
+from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
+from satpy.tests.utils import convert_file_content_to_data_array
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 30)
@@ -75,8 +76,8 @@ class TestAMSR2L2Reader(unittest.TestCase):
def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
from satpy._config import config_search_paths
- from satpy.readers.amsr2_l2 import AMSR2L2FileHandler
from satpy.readers.amsr2_l1b import AMSR2L1BFileHandler
+ from satpy.readers.amsr2_l2 import AMSR2L2FileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(AMSR2L2FileHandler, '__bases__', (FakeHDF5FileHandler2,
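
Reviewer note: several of these suites rely on the `__bases__`-patching trick from the Stack Overflow link cited above: the handler's real base class is swapped for a fake so no actual HDF5/NetCDF I/O happens. Setting `is_local = True` matters because mock cannot `delattr` `__bases__` on cleanup and must restore it by assignment instead. A self-contained sketch of the trick, using toy class names:

    from unittest import mock


    class RealBase:
        def read(self):
            raise IOError("would touch the real file system")


    class Handler(RealBase):
        pass


    class FakeBase:
        def read(self):
            return "fake data"


    p = mock.patch.object(Handler, "__bases__", (FakeBase,))
    p.start()
    p.is_local = True  # restore __bases__ by assignment; delattr would fail
    try:
        assert Handler().read() == "fake data"
    finally:
        p.stop()  # Handler is backed by RealBase again after this
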
diff --git a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
index 8fed2f315e..ac271e7206 100644
--- a/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
+++ b/satpy/tests/reader_tests/test_amsr2_l2_gaasp.py
@@ -18,12 +18,13 @@
"""Tests for the 'amsr2_l2_gaasp' reader."""
import os
-from unittest import mock
from datetime import datetime
-import pytest
-import xarray as xr
+from unittest import mock
+
import dask.array as da
import numpy as np
+import pytest
+import xarray as xr
MBT_FILENAME = "AMSR2-MBT_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc"
PRECIP_FILENAME = "AMSR2-PRECIP_v2r2_GW1_s202008120558310_e202008120607010_c202008120637340.nc"
diff --git a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
index 730a2dafe4..a65d0638f5 100644
--- a/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
+++ b/satpy/tests/reader_tests/test_ascat_l2_soilmoisture_bufr.py
@@ -21,6 +21,7 @@
import sys
import unittest
from datetime import datetime
+
import numpy as np
# Note: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py
@@ -128,6 +129,7 @@ class TesitAscatL2SoilmoistureBufr(unittest.TestCase):
def setUp(self):
"""Create temporary file to perform tests with."""
import tempfile
+
from satpy.readers.ascat_l2_soilmoisture_bufr import AscatSoilMoistureBufr
self.base_dir = tempfile.mkdtemp()
self.fname = save_test_data(self.base_dir)
@@ -149,12 +151,9 @@ def test_scene(self):
from satpy import Scene
fname = os.path.join(self.base_dir, FILENAME)
scn = Scene(reader='ascat_l2_soilmoisture_bufr', filenames=[fname])
- self.assertTrue('start_time' in scn.attrs)
- self.assertTrue('end_time' in scn.attrs)
- self.assertTrue('sensor' in scn.attrs)
- self.assertTrue('scatterometer' in scn.attrs['sensor'])
- self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.attrs['start_time'])
- self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.attrs['end_time'])
+ self.assertTrue('scatterometer' in scn.sensor_names)
+ self.assertTrue(datetime(2020, 12, 21, 9, 33, 0) == scn.start_time)
+ self.assertTrue(datetime(2020, 12, 21, 9, 33, 59) == scn.end_time)
@unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows")
def test_scene_load_available_datasets(self):
diff --git a/satpy/tests/reader_tests/test_atms_l1b_nc.py b/satpy/tests/reader_tests/test_atms_l1b_nc.py
new file mode 100644
index 0000000000..111991cb2d
--- /dev/null
+++ b/satpy/tests/reader_tests/test_atms_l1b_nc.py
@@ -0,0 +1,173 @@
+# Copyright (c) 2022 Satpy developers
+#
+# satpy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# satpy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The atms_l1b_nc reader tests package."""
+
+from datetime import datetime
+
+import numpy as np
+import pytest
+import xarray as xr
+
+from satpy.readers.atms_l1b_nc import AtmsL1bNCFileHandler
+
+
+@pytest.fixture
+def reader(l1b_file):
+ """Return reader of ATMS level1b data."""
+ return AtmsL1bNCFileHandler(
+ filename=l1b_file,
+ filename_info={"creation_time": datetime(2020, 1, 2, 3, 4, 5)},
+ filetype_info={"antenna_temperature": "antenna_temp"},
+ )
+
+
+@pytest.fixture
+def l1b_file(tmp_path, atms_fake_dataset):
+ """Return file path to level1b file."""
+ l1b_file_path = tmp_path / "test_file_atms_l1b.nc"
+ atms_fake_dataset.to_netcdf(l1b_file_path)
+ yield l1b_file_path
+
+
+@pytest.fixture
+def atms_fake_dataset():
+ """Return fake ATMS dataset."""
+ atrack = 2
+ xtrack = 3
+ channel = 22
+ lon = np.full((atrack, xtrack), 1.)
+ lat = np.full((atrack, xtrack), 2.)
+ sat_azi = np.full((atrack, xtrack), 3.)
+ antenna_temp = np.zeros((atrack, xtrack, channel))
+ for idx in range(channel):
+ antenna_temp[:, :, idx] = 100 + float(idx)
+ return xr.Dataset(
+ data_vars={
+ "antenna_temp": (("atrack", "xtrack", "channel"), antenna_temp),
+ "lon": (("atrack", "xtrack"), lon),
+ "lat": (("atrack", "xtrack"), lat),
+ "sat_azi": (("atrack", "xtrack"), sat_azi),
+ },
+ attrs={
+ "time_coverage_start": "2000-01-02T03:04:05Z",
+ "time_coverage_end": "2000-01-02T04:05:06Z",
+ "platform": "JPSS-1",
+ "instrument": "ATMS",
+ },
+ )
+
+
+class TestAtmsL1bNCFileHandler:
+ """Test the AtmsL1bNCFileHandler reader."""
+
+ def test_start_time(self, reader):
+ """Test start time."""
+ assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5)
+
+ def test_end_time(self, reader):
+ """Test end time."""
+ assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6)
+
+ def test_sensor(self, reader):
+ """Test sensor."""
+ assert reader.sensor == "ATMS"
+
+ def test_platform_name(self, reader):
+ """Test platform name."""
+ assert reader.platform_name == "JPSS-1"
+
+ def test_antenna_temperature(self, reader, atms_fake_dataset):
+ """Test antenna temperature."""
+ np.testing.assert_array_equal(
+ reader.antenna_temperature,
+ atms_fake_dataset.antenna_temp.values,
+ )
+
+ @pytest.mark.parametrize("param,expect", (
+ ("start_time", datetime(2000, 1, 2, 3, 4, 5)),
+ ("end_time", datetime(2000, 1, 2, 4, 5, 6)),
+ ("platform_name", "JPSS-1"),
+ ("sensor", "ATMS"),
+ ))
+ def test_attrs(self, reader, param, expect):
+ """Test attributes."""
+ assert reader.attrs[param] == expect
+
+ @pytest.mark.parametrize("dims", (
+ ("xtrack", "atrack"),
+ ("x", "y"),
+ ))
+ def test_standardize_dims(self, reader, dims):
+ """Test standardize dims."""
+ data = xr.DataArray(
+ np.arange(6).reshape(2, 3),
+ dims=dims,
+ )
+ standardized = reader._standardize_dims(data)
+ assert standardized.dims == ("y", "x")
+
+ def test_drop_coords(self, reader):
+ """Test drop coordinates."""
+ coords = "dummy"
+ data = xr.DataArray(
+ np.ones(10),
+ dims=("y"),
+ coords={coords: 0},
+ )
+ assert coords in data.coords
+ data = reader._drop_coords(data)
+ assert coords not in data.coords
+
+ @pytest.mark.parametrize("param,expect", (
+ ("start_time", datetime(2000, 1, 2, 3, 4, 5)),
+ ("end_time", datetime(2000, 1, 2, 4, 5, 6)),
+ ("platform_name", "JPSS-1"),
+ ("sensor", "ATMS"),
+ ("creation_time", datetime(2020, 1, 2, 3, 4, 5)),
+ ("type", "test_data"),
+ ("name", "test"),
+ ))
+ def test_merge_attributes(self, reader, param, expect):
+ """Test merge attributes."""
+ data = xr.DataArray(
+ np.ones(10),
+ dims=("y"),
+ attrs={"type": "test_data"},
+ )
+ dataset_info = {"name": "test"}
+ data = reader._merge_attributes(data, dataset_info)
+ assert data.attrs[param] == expect
+
+ @pytest.mark.parametrize("param,expect", (
+ ("1", 100.),
+ ("sat_azi", 3.),
+ ))
+ def test_select_dataset(self, reader, param, expect):
+ """Test select dataset."""
+ np.testing.assert_array_equal(
+ reader._select_dataset(param),
+ np.full((2, 3), expect),
+ )
+
+ def test_get_dataset(self, reader):
+ """Test get dataset."""
+ dataset_id = {"name": "1"}
+ dataset = reader.get_dataset(dataset_id, {})
+ np.testing.assert_array_equal(
+ dataset,
+ np.full((2, 3), 100.),
+ )
+ assert dataset.dims == ("y", "x")
+ assert dataset.attrs["sensor"] == "ATMS"
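
Reviewer note: the new ATMS suite is a clean example of chained pytest fixtures: `atms_fake_dataset` (an in-memory xarray Dataset) feeds `l1b_file` (written to `tmp_path` as a real NetCDF file), which feeds `reader`, so each test only declares the fixture it needs. A minimal sketch of the same chaining pattern outside satpy (assumes a NetCDF backend such as netCDF4 is installed):

    import numpy as np
    import pytest
    import xarray as xr


    @pytest.fixture
    def fake_dataset():
        """In-memory stand-in for the product under test."""
        return xr.Dataset({"var": (("y", "x"), np.ones((2, 3)))})


    @pytest.fixture
    def nc_file(tmp_path, fake_dataset):
        """Chain fixtures: persist the fake dataset as a real NetCDF file."""
        path = tmp_path / "fake.nc"
        fake_dataset.to_netcdf(path)
        return path


    def test_roundtrip(nc_file):
        with xr.open_dataset(nc_file) as ds:
            assert ds["var"].shape == (2, 3)
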
diff --git a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py
index 0b13973f89..e5241ba025 100644
--- a/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py
+++ b/satpy/tests/reader_tests/test_avhrr_l0_hrpt.py
@@ -26,7 +26,7 @@
import numpy as np
import xarray as xr
-from satpy.readers.hrpt import dtype, HRPTFile
+from satpy.readers.hrpt import HRPTFile, dtype
from satpy.tests.reader_tests.test_avhrr_l1b_gaclac import PygacPatcher
from satpy.tests.utils import make_dataid
@@ -110,7 +110,7 @@ def setUp(self) -> None:
# Import things to patch here to make them patchable. Otherwise another function
# might import it first which would prevent a successful patch.
- from pygac.calibration import calibrate_solar, calibrate_thermal, Calibrator
+ from pygac.calibration import Calibrator, calibrate_solar, calibrate_thermal
self.Calibrator = Calibrator
self.calibrate_thermal = calibrate_thermal
self.calibrate_thermal.side_effect = fake_calibrate_thermal
diff --git a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py
index be9fbbd10b..4a543b449c 100644
--- a/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py
+++ b/satpy/tests/reader_tests/test_avhrr_l1b_gaclac.py
@@ -267,8 +267,8 @@ def test_get_dataset_latlon(self, *mocks):
@mock.patch('satpy.readers.avhrr_l1b_gaclac.GACLACFile._get_angle')
def test_get_dataset_angles(self, get_angle, *mocks):
"""Test getting the angles."""
- from satpy.tests.utils import make_dataid
from satpy.readers.avhrr_l1b_gaclac import ANGLES
+ from satpy.tests.utils import make_dataid
ones = np.ones((3, 3))
get_angle.return_value = ones
diff --git a/satpy/tests/reader_tests/test_clavrx.py b/satpy/tests/reader_tests/test_clavrx.py
index 0e96146e23..eae796ea6a 100644
--- a/satpy/tests/reader_tests/test_clavrx.py
+++ b/satpy/tests/reader_tests/test_clavrx.py
@@ -18,14 +18,15 @@
"""Module for testing the satpy.readers.clavrx module."""
import os
-import numpy as np
+import unittest
+from unittest import mock
+
import dask.array as da
+import numpy as np
import xarray as xr
-from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
-from pyresample.geometry import AreaDefinition
+from pyresample.geometry import AreaDefinition, SwathDefinition
-import unittest
-from unittest import mock
+from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
@@ -95,10 +96,11 @@ def get_test_content(self, filename, filename_info, filetype_info):
file_content['variable3'] = xr.DataArray(
da.from_array(DEFAULT_FILE_DATA, chunks=4096).astype(np.byte),
attrs={
+ 'SCALED': 0,
'_FillValue': -128,
'flag_meanings': 'clear water supercooled mixed ice unknown',
'flag_values': [0, 1, 2, 3, 4, 5],
- 'units': '1',
+ 'units': 'none',
})
file_content['variable3/shape'] = DEFAULT_FILE_SHAPE
@@ -113,10 +115,10 @@ class TestCLAVRXReaderPolar(unittest.TestCase):
def setUp(self):
"""Wrap HDF4 file handler with our own fake handler."""
from satpy._config import config_search_paths
- from satpy.readers.clavrx import CLAVRXFileHandler
+ from satpy.readers.clavrx import CLAVRXHDF4FileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
- self.p = mock.patch.object(CLAVRXFileHandler, '__bases__', (FakeHDF4FileHandlerPolar,))
+ self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerPolar,))
self.fake_handler = self.p.start()
self.p.is_local = True
@@ -150,15 +152,15 @@ def test_available_datasets(self):
# mimic the YAML file being configured for more datasets
fake_dataset_info = [
- (None, {'name': 'variable1', 'resolution': None, 'file_type': ['level2']}),
- (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['level2']}),
- (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['level2']}),
- (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['level2']}),
- (None, {'name': '_fake1', 'file_type': ['level2']}),
+ (None, {'name': 'variable1', 'resolution': None, 'file_type': ['clavrx_hdf4']}),
+ (True, {'name': 'variable2', 'resolution': 742, 'file_type': ['clavrx_hdf4']}),
+ (True, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}),
+ (None, {'name': 'variable2', 'resolution': 1, 'file_type': ['clavrx_hdf4']}),
+ (None, {'name': '_fake1', 'file_type': ['clavrx_hdf4']}),
(None, {'name': 'variable1', 'file_type': ['level_fake']}),
- (True, {'name': 'variable3', 'file_type': ['level2']}),
+ (True, {'name': 'variable3', 'file_type': ['clavrx_hdf4']}),
]
- new_ds_infos = list(r.file_handlers['level2'][0].available_datasets(
+ new_ds_infos = list(r.file_handlers['clavrx_hdf4'][0].available_datasets(
fake_dataset_info))
self.assertEqual(len(new_ds_infos), 9)
@@ -204,8 +206,9 @@ def test_available_datasets(self):
def test_load_all(self):
"""Test loading all test datasets."""
- from satpy.readers import load_reader
import xarray as xr
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
loadables = r.select_files_from_pathnames([
@@ -217,8 +220,12 @@ def test_load_all(self):
'variable3'])
self.assertEqual(len(datasets), 3)
for v in datasets.values():
- assert 'calibration' not in v.attrs
self.assertEqual(v.attrs['units'], '1')
+ self.assertEqual(v.attrs['platform_name'], 'npp')
+ self.assertEqual(v.attrs['sensor'], 'viirs')
+ self.assertIsInstance(v.attrs['area'], SwathDefinition)
+ self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 16)
+ self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 16)
self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings'))
@@ -287,6 +294,7 @@ def get_test_content(self, filename, filename_info, filetype_info):
DEFAULT_FILE_DATA.astype(np.byte),
dims=('y', 'x'),
attrs={
+ 'SCALED': 0,
'_FillValue': -128,
'flag_meanings': 'clear water supercooled mixed ice unknown',
'flag_values': [0, 1, 2, 3, 4, 5],
@@ -305,10 +313,10 @@ class TestCLAVRXReaderGeo(unittest.TestCase):
def setUp(self):
"""Wrap HDF4 file handler with our own fake handler."""
from satpy._config import config_search_paths
- from satpy.readers.clavrx import CLAVRXFileHandler
+ from satpy.readers.clavrx import CLAVRXHDF4FileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
- self.p = mock.patch.object(CLAVRXFileHandler, '__bases__', (FakeHDF4FileHandlerGeo,))
+ self.p = mock.patch.object(CLAVRXHDF4FileHandler, '__bases__', (FakeHDF4FileHandlerGeo,))
self.fake_handler = self.p.start()
self.p.is_local = True
@@ -330,8 +338,9 @@ def test_init(self):
def test_no_nav_donor(self):
"""Test exception raised when no donor file is available."""
- from satpy.readers import load_reader
import xarray as xr
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
loadables = r.select_files_from_pathnames([
@@ -342,8 +351,9 @@ def test_no_nav_donor(self):
def test_load_all_old_donor(self):
"""Test loading all test datasets with old donor."""
- from satpy.readers import load_reader
import xarray as xr
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
loadables = r.select_files_from_pathnames([
@@ -368,15 +378,26 @@ def test_load_all_old_donor(self):
datasets = r.load(['variable1', 'variable2', 'variable3'])
self.assertEqual(len(datasets), 3)
for v in datasets.values():
- assert 'calibration' not in v.attrs
+ self.assertNotIn('calibration', v.attrs)
self.assertEqual(v.attrs['units'], '1')
self.assertIsInstance(v.attrs['area'], AreaDefinition)
- self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings'))
+ if v.attrs.get("flag_values"):
+ self.assertIn('_FillValue', v.attrs)
+ else:
+ self.assertNotIn('_FillValue', v.attrs)
+ if v.attrs["name"] == 'variable1':
+ self.assertIsInstance(v.attrs["valid_range"], list)
+ else:
+ self.assertNotIn('valid_range', v.attrs)
+ if 'flag_values' in v.attrs:
+ self.assertTrue(np.issubdtype(v.dtype, np.integer))
+ self.assertIsNotNone(v.attrs.get('flag_meanings'))
def test_load_all_new_donor(self):
"""Test loading all test datasets with new donor."""
- from satpy.readers import load_reader
import xarray as xr
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.clavrx.SDS', xr.DataArray):
loadables = r.select_files_from_pathnames([
@@ -401,7 +422,10 @@ def test_load_all_new_donor(self):
datasets = r.load(['variable1', 'variable2', 'variable3'])
self.assertEqual(len(datasets), 3)
for v in datasets.values():
- assert 'calibration' not in v.attrs
+ self.assertNotIn('calibration', v.attrs)
self.assertEqual(v.attrs['units'], '1')
self.assertIsInstance(v.attrs['area'], AreaDefinition)
+ self.assertTrue(v.attrs['area'].is_geostationary)
+ self.assertEqual(v.attrs['platform_name'], 'himawari8')
+ self.assertEqual(v.attrs['sensor'], 'ahi')
self.assertIsNotNone(datasets['variable3'].attrs.get('flag_meanings'))
diff --git a/satpy/tests/reader_tests/test_clavrx_nc.py b/satpy/tests/reader_tests/test_clavrx_nc.py
new file mode 100644
index 0000000000..8726d97eae
--- /dev/null
+++ b/satpy/tests/reader_tests/test_clavrx_nc.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.clavrx module."""
+
+import os
+from unittest import mock
+
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+DEFAULT_FILE_DTYPE = np.uint16
+DEFAULT_FILE_SHAPE = (10, 300)
+DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
+ dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE)
+DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32)
+DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
+DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
+AHI_FILE = 'clavrx_H08_20210603_1500_B01_FLDK_R.level2.nc'
+
+
+def fake_test_content(filename, **kwargs):
+ """Mimic reader input file content."""
+ attrs = {
+ 'platform': 'HIM8',
+ 'sensor': 'AHI',
+ # this is a Level 2 file that came from a L1B file
+ 'L1B': 'clavrx_H08_20210603_1500_B01_FLDK_R',
+ }
+
+ longitude = xr.DataArray(DEFAULT_LON_DATA,
+ dims=('scan_lines_along_track_direction',
+ 'pixel_elements_along_scan_direction'),
+ attrs={'_FillValue': np.nan,
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ 'standard_name': 'longitude',
+ 'units': 'degrees_east'
+ })
+
+ latitude = xr.DataArray(DEFAULT_LAT_DATA,
+ dims=('scan_lines_along_track_direction',
+ 'pixel_elements_along_scan_direction'),
+ attrs={'_FillValue': np.nan,
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ 'standard_name': 'latitude',
+                                    'units': 'degrees_north'
+ })
+
+ variable1 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
+ dims=('scan_lines_along_track_direction',
+ 'pixel_elements_along_scan_direction'),
+ attrs={'_FillValue': np.nan,
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ 'units': '1',
+ 'valid_range': [-32767, 32767],
+ })
+
+ # data with fill values
+ variable2 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.float32),
+ dims=('scan_lines_along_track_direction',
+ 'pixel_elements_along_scan_direction'),
+ attrs={'_FillValue': np.nan,
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ 'units': '1',
+ 'valid_range': [-32767, 32767],
+ })
+ variable2 = variable2.where(variable2 % 2 != 0)
+
+ # category
+ variable3 = xr.DataArray(DEFAULT_FILE_DATA.astype(np.byte),
+ dims=('scan_lines_along_track_direction',
+ 'pixel_elements_along_scan_direction'),
+ attrs={'SCALED': 0,
+ '_FillValue': -128,
+ 'flag_meanings': 'clear water supercooled mixed ice unknown',
+ 'flag_values': [0, 1, 2, 3, 4, 5],
+ 'units': '1',
+ })
+
+ ds_vars = {
+ 'longitude': longitude,
+ 'latitude': latitude,
+ 'variable1': variable1,
+ 'variable2': variable2,
+ 'variable3': variable3
+ }
+
+ ds = xr.Dataset(ds_vars, attrs=attrs)
+ ds = ds.assign_coords({"latitude": latitude, "longitude": longitude})
+
+ return ds
+
+
+class TestCLAVRXReaderGeo:
+ """Test CLAVR-X Reader with Geo files."""
+
+ yaml_file = "clavrx.yaml"
+
+ def setup_method(self):
+ """Read fake data."""
+ from satpy._config import config_search_paths
+ self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+
+ @pytest.mark.parametrize(
+ ("filenames", "expected_loadables"),
+ [([AHI_FILE], 1)]
+ )
+ def test_reader_creation(self, filenames, expected_loadables):
+ """Test basic initialization."""
+ from satpy.readers import load_reader
+ with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ assert len(loadables) == expected_loadables
+ r.create_filehandlers(loadables)
+ # make sure we have some files
+ assert r.file_handlers
+
+ @pytest.mark.parametrize(
+ ("filenames", "expected_datasets"),
+ [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ]
+ )
+ def test_available_datasets(self, filenames, expected_datasets):
+ """Test that variables are dynamically discovered."""
+ from satpy.readers import load_reader
+ with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ r.create_filehandlers(loadables)
+ avails = list(r.available_dataset_names)
+ for var_name in expected_datasets:
+ assert var_name in avails
+
+ @pytest.mark.parametrize(
+ ("filenames", "loadable_ids"),
+ [([AHI_FILE], ['variable1', 'variable2', 'variable3']), ]
+ )
+ def test_load_all_new_donor(self, filenames, loadable_ids):
+ """Test loading all test datasets with new donor."""
+ from satpy.readers import load_reader
+ with mock.patch('satpy.readers.clavrx.xr.open_dataset') as od:
+ od.side_effect = fake_test_content
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames(filenames)
+ r.create_filehandlers(loadables)
+ with mock.patch('satpy.readers.clavrx.glob') as g, \
+ mock.patch('satpy.readers.clavrx.netCDF4.Dataset') as d:
+ g.return_value = ['fake_donor.nc']
+ x = np.linspace(-0.1518, 0.1518, 300)
+ y = np.linspace(0.1518, -0.1518, 10)
+ proj = mock.Mock(
+ semi_major_axis=6378137,
+ semi_minor_axis=6356752.3142,
+ perspective_point_height=35791000,
+ longitude_of_projection_origin=140.7,
+ sweep_angle_axis='y',
+ )
+ d.return_value = fake_donor = mock.MagicMock(
+ variables={'goes_imager_projection': proj, 'x': x, 'y': y},
+ )
+ fake_donor.__getitem__.side_effect = lambda key: fake_donor.variables[key]
+ datasets = r.load(loadable_ids)
+ assert len(datasets) == 3
+ for v in datasets.values():
+ assert 'calibration' not in v.attrs
+ assert v.attrs['units'] == '1'
+ assert isinstance(v.attrs['area'], AreaDefinition)
+ assert v.attrs['platform_name'] == 'himawari8'
+ assert v.attrs['sensor'] == 'AHI'
+ assert 'rows_per_scan' not in v.coords.get('longitude').attrs
+ if v.attrs["name"] in ["variable1", "variable2"]:
+ assert isinstance(v.attrs["valid_range"], list)
+ assert v.dtype == np.float32
+ else:
+                assert v.attrs.get('flag_meanings') is not None
+                assert "_FillValue" in v.attrs
+ assert np.issubdtype(v.dtype, np.integer)
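
Reviewer note: unlike the HDF4 suite above, the new NetCDF tests skip fake file-handler classes entirely and patch `xr.open_dataset` with a `side_effect` factory that returns an in-memory Dataset. The core of that pattern, reduced to a sketch (the Dataset contents here are placeholders):

    from unittest import mock

    import numpy as np
    import xarray as xr


    def _fake_open_dataset(filename, **kwargs):
        """side_effect stand-in: hand back an in-memory Dataset, no disk I/O."""
        return xr.Dataset(
            {"variable1": (("y", "x"), np.zeros((2, 2)))},
            attrs={"sensor": "AHI"},
        )


    with mock.patch("xarray.open_dataset", side_effect=_fake_open_dataset):
        ds = xr.open_dataset("does_not_exist.nc")  # never touches the filesystem
        assert ds.attrs["sensor"] == "AHI"
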
diff --git a/satpy/tests/reader_tests/test_cmsaf_claas.py b/satpy/tests/reader_tests/test_cmsaf_claas.py
index adcfaa04b2..fd421d7d95 100644
--- a/satpy/tests/reader_tests/test_cmsaf_claas.py
+++ b/satpy/tests/reader_tests/test_cmsaf_claas.py
@@ -17,88 +17,91 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the 'cmsaf-claas2_l2_nc' reader."""
-import os
import datetime
+import os
+
import numpy as np
-import xarray as xr
-import numpy.testing
import pytest
-from unittest import mock
-from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
-
-
-class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler):
- """Class for faking the NetCDF4 Filehandler."""
-
- _nrows = 30
- _ncols = 40
-
- def __init__(self, *args, auto_maskandscale, **kwargs):
- """Init the file handler."""
- # make sure that CLAAS2 reader asks NetCDF4FileHandler for having
- # auto_maskandscale enabled
- assert auto_maskandscale
- super().__init__(*args, **kwargs)
-
- def _get_global_attributes(self):
- data = {}
- attrs = {
- "CMSAF_proj4_params": "+a=6378169.0 +h=35785831.0 "
- "+b=6356583.8 +lon_0=0 +proj=geos",
- "CMSAF_area_extent": np.array(
- [-5456233.41938636, -5453233.01608472,
- 5453233.01608472, 5456233.41938636]),
- "time_coverage_start": "1985-08-13T13:15:00Z",
- "time_coverage_end": "2085-08-13T13:15:00Z",
- }
- for (k, v) in attrs.items():
- data["/attr/" + k] = v
- return data
-
- def _get_data(self):
- data = {
- "cph": xr.DataArray(
- np.arange(self._nrows*self._ncols, dtype="i4").reshape(
- (1, self._nrows, self._ncols))/100,
- dims=("time", "y", "x")),
- "ctt": xr.DataArray(
- np.arange(self._nrows*self._ncols, 0, -1,
- dtype="i4").reshape(
- (self._nrows, self._ncols))/100,
- dims=("y", "x")),
- "time_bnds": xr.DataArray(
- [[12436.91666667, 12436.92534722]],
- dims=("time", "time_bnds"))}
- for k in set(data.keys()):
- data[f"{k:s}/dimensions"] = data[k].dims
- data[f"{k:s}/attr/fruit"] = "apple"
- data[f"{k:s}/attr/scale_factor"] = np.float32(0.01)
- return data
-
- def _get_dimensions(self):
- data = {
- "/dimension/x": self._nrows,
- "/dimension/y": self._ncols,
- "/dimension/time": 1,
- "/dimension/time_bnds": 2,
- }
- return data
-
- def get_test_content(self, filename, filename_info, filetype_info):
- """Get the content of the test data."""
- # mock global attributes
- # - root groups global
- # - other groups global
- # mock data variables
- # mock dimensions
- #
- # ... but only what satpy is using ...
-
- D = {}
- D.update(self._get_data())
- D.update(self._get_dimensions())
- D.update(self._get_global_attributes())
- return D
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+from satpy.tests.utils import make_dataid
+
+
+@pytest.fixture(
+ params=[datetime.datetime(2017, 12, 5), datetime.datetime(2017, 12, 6)]
+)
+def start_time(request):
+ """Get start time of the dataset."""
+ return request.param
+
+
+@pytest.fixture
+def start_time_str(start_time):
+ """Get string representation of the start time."""
+ return start_time.strftime("%Y-%m-%dT%H:%M:%SZ")
+
+
+@pytest.fixture()
+def fake_dataset(start_time_str):
+ """Create a CLAAS-like test dataset."""
+ cph = xr.DataArray(
+ [[[0, 1], [2, 0]]],
+ dims=("time", "y", "x")
+ )
+ ctt = xr.DataArray(
+ [[280, 290], [300, 310]],
+ dims=("y", "x")
+ )
+ time_bounds = xr.DataArray(
+ [[12436.91666667, 12436.92534722]],
+ dims=("time", "bndsize")
+ )
+ attrs = {
+ "CMSAF_proj4_params": "+a=6378169.0 +h=35785831.0 "
+ "+b=6356583.8 +lon_0=0 +proj=geos",
+ "CMSAF_area_extent": np.array(
+ [-5456233.41938636, -5453233.01608472,
+ 5453233.01608472, 5456233.41938636]),
+ "time_coverage_start": start_time_str,
+ "time_coverage_end": "2085-08-13T13:15:00Z",
+ }
+ return xr.Dataset(
+ {
+ "cph": cph,
+ "ctt": ctt,
+ "time_bnds": time_bounds
+ },
+ attrs=attrs
+ )
+
+
+@pytest.fixture
+def encoding():
+ """Dataset encoding."""
+ return {
+ "ctt": {"scale_factor": np.float32(0.01)},
+ }
+
+
+@pytest.fixture
+def fake_file(fake_dataset, encoding, tmp_path):
+ """Write a fake dataset to file."""
+ filename = tmp_path / "CPPin20140101001500305SVMSG01MD.nc"
+ fake_dataset.to_netcdf(filename, encoding=encoding)
+ yield filename
+
+
+@pytest.fixture
+def fake_files(fake_dataset, encoding, tmp_path):
+ """Write the same fake dataset into two different files."""
+ filenames = [
+ tmp_path / "CPPin20140101001500305SVMSG01MD.nc",
+ tmp_path / "CPPin20140101003000305SVMSG01MD.nc",
+ ]
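+ # two consecutive 15-minute time slots, for testing multi-file combination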
+ for filename in filenames:
+ fake_dataset.to_netcdf(filename, encoding=encoding)
+ yield filenames
@pytest.fixture
@@ -113,20 +116,6 @@ def reader():
return reader
-@pytest.fixture(autouse=True, scope="class")
-def fake_handler():
- """Wrap NetCDF4 FileHandler with our own fake handler."""
- # implementation strongly inspired by test_viirs_l1b.py
- from satpy.readers.cmsaf_claas2 import CLAAS2
- p = mock.patch.object(
- CLAAS2,
- "__bases__",
- (FakeNetCDF4FileHandler2,))
- with p:
- p.is_local = True
- yield p
-
-
def test_file_pattern(reader):
"""Test file pattern matching."""
filenames = [
@@ -140,25 +129,105 @@ def test_file_pattern(reader):
assert len(files) == 3
-def test_load(reader):
- """Test loading."""
- from satpy.tests.utils import make_dataid
-
- # testing two filenames to test correctly combined
- filenames = [
- "CTXin20040120091500305SVMSG01MD.nc",
- "CTXin20040120093000305SVMSG01MD.nc"]
-
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
- res = reader.load(
- [make_dataid(name=name) for name in ["cph", "ctt"]])
- assert 2 == len(res)
- assert reader.start_time == datetime.datetime(1985, 8, 13, 13, 15)
- assert reader.end_time == datetime.datetime(2085, 8, 13, 13, 15)
- np.testing.assert_array_almost_equal(
- res["cph"].data,
- np.tile(np.arange(0.0, 12.0, 0.01).reshape((30, 40)), [2, 1]))
- np.testing.assert_array_almost_equal(
- res["ctt"].data,
- np.tile(np.arange(12.0, 0.0, -0.01).reshape((30, 40)), [2, 1]))
+class TestCLAAS2MultiFile:
+ """Test reading multiple CLAAS-2 files."""
+
+ @pytest.fixture
+ def multi_file_reader(self, reader, fake_files):
+ """Create a multi-file reader."""
+ loadables = reader.select_files_from_pathnames(fake_files)
+ reader.create_filehandlers(loadables)
+ return reader
+
+ @pytest.fixture
+ def multi_file_dataset(self, multi_file_reader):
+ """Load datasets from multiple files."""
+ ds_ids = [make_dataid(name=name) for name in ["cph", "ctt"]]
+ datasets = multi_file_reader.load(ds_ids)
+ return datasets
+
+ def test_combine_timestamps(self, multi_file_reader, start_time):
+ """Test combination of timestamps."""
+ assert multi_file_reader.start_time == start_time
+ assert multi_file_reader.end_time == datetime.datetime(2085, 8, 13, 13, 15)
+
+ @pytest.mark.parametrize(
+ "ds_name,expected",
+ [
+ ("cph", [[0, 1], [2, 0], [0, 1], [2, 0]]),
+ ("ctt", [[280, 290], [300, 310], [280, 290], [300, 310]]),
+ ]
+ )
+ def test_combine_datasets(self, multi_file_dataset, ds_name, expected):
+ """Test combination of datasets."""
+ np.testing.assert_array_almost_equal(
+ multi_file_dataset[ds_name].data, expected
+ )
+
+ def test_number_of_datasets(self, multi_file_dataset):
+ """Test number of datasets."""
+ assert 2 == len(multi_file_dataset)
+
+
+class TestCLAAS2SingleFile:
+ """Test reading a single CLAAS2 file."""
+
+ @pytest.fixture
+ def file_handler(self, fake_file):
+ """Return a CLAAS-2 file handler."""
+ from satpy.readers.cmsaf_claas2 import CLAAS2
+ return CLAAS2(fake_file, {}, {})
+
+ @pytest.fixture
+ def area_extent_exp(self, start_time):
+ """Get expected area extent."""
+ if start_time < datetime.datetime(2017, 12, 6):
+ return (-5454733.160460291, -5454733.160460292, 5454733.160460292, 5454733.160460291)
+ return (-5456233.362099582, -5453232.958821001, 5453232.958821001, 5456233.362099582)
+
+ @pytest.fixture
+ def area_exp(self, area_extent_exp):
+ """Get expected area definition."""
+ proj_dict = {
+ "a": 6378169.0,
+ "b": 6356583.8,
+ "h": 35785831.0,
+ "lon_0": 0.0,
+ "proj": "geos",
+ "units": "m",
+ }
+ return AreaDefinition(
+ area_id="msg_seviri_fes_3km",
+ description="MSG SEVIRI Full Earth Scanning service area definition with 3 km resolution",
+ proj_id="geos",
+ projection=proj_dict,
+ area_extent=area_extent_exp,
+ width=3636,
+ height=3636,
+ )
+
+ def test_get_area_def(self, file_handler, area_exp):
+ """Test area definition."""
+ area = file_handler.get_area_def(make_dataid(name="foo"))
+ assert area == area_exp
+
+ @pytest.mark.parametrize(
+ "ds_name,expected",
+ [
+ ("ctt", xr.DataArray([[280, 290], [300, 310]], dims=('y', 'x'))),
+ ("cph", xr.DataArray([[0, 1], [2, 0]], dims=('y', 'x'))),
+ ]
+ )
+ def test_get_dataset(self, file_handler, ds_name, expected):
+ """Test dataset loading."""
+ dsid = make_dataid(name=ds_name)
+ ds = file_handler.get_dataset(dsid, {})
+ xr.testing.assert_allclose(ds, expected)
+
+ def test_start_time(self, file_handler, start_time):
+ """Test start time property."""
+ assert file_handler.start_time == start_time
+
+ def test_end_time(self, file_handler):
+ """Test end time property."""
+ assert file_handler.end_time == datetime.datetime(2085, 8, 13, 13, 15)
diff --git a/satpy/tests/reader_tests/test_electrol_hrit.py b/satpy/tests/reader_tests/test_electrol_hrit.py
index a5c3d7c2c7..eca413d033 100644
--- a/satpy/tests/reader_tests/test_electrol_hrit.py
+++ b/satpy/tests/reader_tests/test_electrol_hrit.py
@@ -25,11 +25,16 @@
import numpy as np
from xarray import DataArray
-from satpy.readers.electrol_hrit import (HRITGOMSEpilogueFileHandler,
- HRITGOMSFileHandler,
- HRITGOMSPrologueFileHandler, epilogue,
- image_acquisition, prologue,
- recarray2dict, satellite_status)
+from satpy.readers.electrol_hrit import (
+ HRITGOMSEpilogueFileHandler,
+ HRITGOMSFileHandler,
+ HRITGOMSPrologueFileHandler,
+ epilogue,
+ image_acquisition,
+ prologue,
+ recarray2dict,
+ satellite_status,
+)
from satpy.tests.utils import make_dataid
# Simplify some type selections
diff --git a/satpy/tests/reader_tests/test_eps_l1b.py b/satpy/tests/reader_tests/test_eps_l1b.py
index 93926603a5..c807a658fc 100644
--- a/satpy/tests/reader_tests/test_eps_l1b.py
+++ b/satpy/tests/reader_tests/test_eps_l1b.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2019 Satpy developers
+# Copyright (c) 2019, 2022 Satpy developers
#
# This file is part of satpy.
#
@@ -20,15 +20,15 @@
import os
from contextlib import suppress
from tempfile import mkstemp
-from unittest import TestCase
-from unittest import mock
+from unittest import TestCase, mock
import numpy as np
import pytest
-import satpy
import xarray as xr
-from satpy.readers import eps_l1b as eps
+
+import satpy
from satpy._config import get_config_path
+from satpy.readers import eps_l1b as eps
from satpy.tests.utils import make_dataid
grh_dtype = np.dtype([("record_class", "|i1"),
@@ -107,48 +107,49 @@ def setUp(self):
def test_read_all(self):
"""Test initialization."""
self.fh._read_all()
- assert(self.fh.scanlines == 1080)
- assert(self.fh.pixels == 2048)
+ assert self.fh.scanlines == 1080
+ assert self.fh.pixels == 2048
def test_dataset(self):
"""Test getting a dataset."""
did = make_dataid(name='1', calibration='reflectance')
res = self.fh.get_dataset(did, {})
- assert(isinstance(res, xr.DataArray))
- assert(res.attrs['platform_name'] == 'Metop-C')
- assert(res.attrs['sensor'] == 'avhrr-3')
- assert(res.attrs['name'] == '1')
- assert(res.attrs['calibration'] == 'reflectance')
+ assert isinstance(res, xr.DataArray)
+ assert res.attrs['platform_name'] == 'Metop-C'
+ assert res.attrs['sensor'] == 'avhrr-3'
+ assert res.attrs['name'] == '1'
+ assert res.attrs['calibration'] == 'reflectance'
+ assert res.attrs['units'] == '%'
did = make_dataid(name='4', calibration='brightness_temperature')
res = self.fh.get_dataset(did, {})
- assert(isinstance(res, xr.DataArray))
- assert(res.attrs['platform_name'] == 'Metop-C')
- assert(res.attrs['sensor'] == 'avhrr-3')
- assert(res.attrs['name'] == '4')
- assert(res.attrs['calibration'] == 'brightness_temperature')
+ assert isinstance(res, xr.DataArray)
+ assert res.attrs['platform_name'] == 'Metop-C'
+ assert res.attrs['sensor'] == 'avhrr-3'
+ assert res.attrs['name'] == '4'
+ assert res.attrs['calibration'] == 'brightness_temperature'
+ assert res.attrs['units'] == 'K'
def test_navigation(self):
"""Test the navigation."""
did = make_dataid(name='longitude')
res = self.fh.get_dataset(did, {})
- assert(isinstance(res, xr.DataArray))
- assert(res.attrs['platform_name'] == 'Metop-C')
- assert(res.attrs['sensor'] == 'avhrr-3')
- assert(res.attrs['name'] == 'longitude')
+ assert isinstance(res, xr.DataArray)
+ assert res.attrs['platform_name'] == 'Metop-C'
+ assert res.attrs['sensor'] == 'avhrr-3'
+ assert res.attrs['name'] == 'longitude'
def test_angles(self):
"""Test the navigation."""
did = make_dataid(name='solar_zenith_angle')
res = self.fh.get_dataset(did, {})
- assert(isinstance(res, xr.DataArray))
- assert(res.attrs['platform_name'] == 'Metop-C')
- assert(res.attrs['sensor'] == 'avhrr-3')
- assert(res.attrs['name'] == 'solar_zenith_angle')
+ assert isinstance(res, xr.DataArray)
+ assert res.attrs['platform_name'] == 'Metop-C'
+ assert res.attrs['sensor'] == 'avhrr-3'
+ assert res.attrs['name'] == 'solar_zenith_angle'
@mock.patch('satpy.readers.eps_l1b.EPSAVHRRFile.__getitem__')
- @mock.patch('satpy.readers.eps_l1b.EPSAVHRRFile.__init__')
- def test_get_full_angles_twice(self, mock__init__, mock__getitem__):
+ def test_get_full_angles_twice(self, mock__getitem__):
"""Test get full angles twice."""
geotiemock = mock.Mock()
metop20kmto1km = geotiemock.metop20kmto1km
@@ -160,13 +161,13 @@ def mock_getitem(key):
"ANGULAR_RELATIONS_LAST": np.zeros((7, 4)),
"NAV_SAMPLE_RATE": 20}
return data[key]
- mock__init__.return_value = None
mock__getitem__.side_effect = mock_getitem
- avhrr_reader = satpy.readers.eps_l1b.EPSAVHRRFile()
- avhrr_reader.sun_azi = None
- avhrr_reader.sat_azi = None
- avhrr_reader.sun_zen = None
- avhrr_reader.sat_zen = None
+
+ avhrr_reader = satpy.readers.eps_l1b.EPSAVHRRFile(
+ filename="foo",
+ filename_info={"start_time": "foo", "end_time": "bar"},
+ filetype_info={"foo": "bar"}
+ )
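+ # dummy filename/filetype info is enough here, since all data access
+ # goes through the mocked __getitem__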
avhrr_reader.scanlines = 7
avhrr_reader.pixels = 2048
@@ -174,9 +175,9 @@ def mock_getitem(key):
# Get dask arrays
sun_azi, sun_zen, sat_azi, sat_zen = avhrr_reader.get_full_angles()
# Convert to numpy array
- sun_zen_np1 = np.array(avhrr_reader.sun_zen)
+ sun_zen_np1 = np.array(sun_zen)
# Convert to numpy array again
- sun_zen_np2 = np.array(avhrr_reader.sun_zen)
+ sun_zen_np2 = np.array(sun_zen)
assert np.allclose(sun_zen_np1, sun_zen_np2)
diff --git a/satpy/tests/reader_tests/test_eum_base.py b/satpy/tests/reader_tests/test_eum_base.py
index da02fdc55c..d5490421a6 100644
--- a/satpy/tests/reader_tests/test_eum_base.py
+++ b/satpy/tests/reader_tests/test_eum_base.py
@@ -19,10 +19,18 @@
import unittest
from datetime import datetime
+
import numpy as np
-from satpy.readers.eum_base import (timecds2datetime, time_cds_short,
- time_cds, time_cds_expanded,
- recarray2dict, get_service_mode)
+
+from satpy.readers.eum_base import (
+ get_service_mode,
+ recarray2dict,
+ time_cds,
+ time_cds_expanded,
+ time_cds_short,
+ timecds2datetime,
+)
+from satpy.readers.seviri_base import mpef_product_header
class TestMakeTimeCdsDictionary(unittest.TestCase):
@@ -70,9 +78,10 @@ def test_fun(self):
class TestRecarray2Dict(unittest.TestCase):
"""Test TestRecarray2Dict."""
- def test_fun(self):
+ def test_timestamps(self):
"""Test function for TestRecarray2Dict."""
# datatype definition
+
pat_dt = np.dtype([
('TrueRepeatCycleStart', time_cds_expanded),
('PlanForwardScanEnd', time_cds_expanded),
@@ -94,6 +103,22 @@ def test_fun(self):
self.assertEqual(recarray2dict(pat), expected)
+ def test_mpef_product_header(self):
+ """Test function for TestRecarray2Dict and mpef product header."""
+ names = ['ImageLocation', 'GsicsCalMode', 'GsicsCalValidity',
+ 'Padding', 'OffsetToData', 'Padding2']
+ mpef_header = np.dtype([(name, mpef_product_header.fields[name][0])
+ for name in names])
+ mph_struct = np.array([('OPE', True, False, 'XX', 1000, '12345678')], dtype=mpef_header)
+ test_mph = {'ImageLocation': "OPE",
+ 'GsicsCalMode': True,
+ 'GsicsCalValidity': False,
+ 'Padding': 'XX',
+ 'OffsetToData': 1000,
+ 'Padding2': '12345678'
+ }
+ self.assertEqual(recarray2dict(mph_struct), test_mph)
+
class TestGetServiceMode(unittest.TestCase):
"""Test the get_service_mode function."""
diff --git a/satpy/tests/reader_tests/test_fci_l1c_fdhsi.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py
similarity index 57%
rename from satpy/tests/reader_tests/test_fci_l1c_fdhsi.py
rename to satpy/tests/reader_tests/test_fci_l1c_nc.py
index 2e322feaef..48203503c4 100644
--- a/satpy/tests/reader_tests/test_fci_l1c_fdhsi.py
+++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py
@@ -15,16 +15,18 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Tests for the 'fci_l1c_fdhsi' reader."""
+"""Tests for the 'fci_l1c_nc' reader."""
+import logging
import os
-import numpy as np
-import xarray as xr
+from unittest import mock
+
import dask.array as da
+import numpy as np
import numpy.testing
import pytest
-import logging
-from unittest import mock
+import xarray as xr
+
from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
@@ -32,23 +34,22 @@ class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler):
"""Class for faking the NetCDF4 Filehandler."""
def _get_test_calib_for_channel_ir(self, chroot, meas):
- from pyspectral.blackbody import (
- H_PLANCK as h,
- K_BOLTZMANN as k,
- C_SPEED as c)
+ from pyspectral.blackbody import C_SPEED as c
+ from pyspectral.blackbody import H_PLANCK as h
+ from pyspectral.blackbody import K_BOLTZMANN as k
xrda = xr.DataArray
data = {}
data[meas + "/radiance_to_bt_conversion_coefficient_wavenumber"] = xrda(955)
data[meas + "/radiance_to_bt_conversion_coefficient_a"] = xrda(1)
data[meas + "/radiance_to_bt_conversion_coefficient_b"] = xrda(0.4)
- data[meas + "/radiance_to_bt_conversion_constant_c1"] = xrda(1e11*2*h*c**2)
- data[meas + "/radiance_to_bt_conversion_constant_c2"] = xrda(1e2*h*c/k)
+ data[meas + "/radiance_to_bt_conversion_constant_c1"] = xrda(1e11 * 2 * h * c ** 2)
+ data[meas + "/radiance_to_bt_conversion_constant_c2"] = xrda(1e2 * h * c / k)
return data
def _get_test_calib_for_channel_vis(self, chroot, meas):
xrda = xr.DataArray
data = {}
- data["state/celestial/earth_sun_distance"] = xrda(149597870.7)
+ data["state/celestial/earth_sun_distance"] = xrda(da.repeat(da.array([149597870.7]), 6000))
data[meas + "/channel_effective_solar_irradiance"] = xrda(50)
return data
@@ -60,6 +61,8 @@ def _get_test_content_for_channel(self, pat, ch):
meas = chroot + "/measured"
rad = meas + "/effective_radiance"
qual = meas + "/pixel_quality"
+ index_map = meas + "/index_map"
+ rad_conv_coeff = meas + "/radiance_unit_conversion_coefficient"
pos = meas + "/{:s}_position_{:s}"
shp = rad + "/shape"
x = meas + "/x"
@@ -69,15 +72,15 @@ def _get_test_content_for_channel(self, pat, ch):
ch_path = rad.format(ch_str)
common_attrs = {
- "scale_factor": 5,
- "add_offset": 10,
- "long_name": "Effective Radiance",
- "units": "mW.m-2.sr-1.(cm-1)-1",
- "ancillary_variables": "pixel_quality"
- }
+ "scale_factor": 5,
+ "add_offset": 10,
+ "long_name": "Effective Radiance",
+ "units": "mW.m-2.sr-1.(cm-1)-1",
+ "ancillary_variables": "pixel_quality"
+ }
if ch == 38:
fire_line = da.ones((1, ncols), dtype="uint16", chunks=1024) * 5000
- data_without_fires = da.ones((nrows-1, ncols), dtype="uint16", chunks=1024)
+ data_without_fires = da.ones((nrows - 1, ncols), dtype="uint16", chunks=1024)
d = xrda(
da.concatenate([fire_line, data_without_fires], axis=0),
dims=("y", "x"),
@@ -97,50 +100,55 @@ def _get_test_content_for_channel(self, pat, ch):
"warm_scale_factor": 1,
"warm_add_offset": 0,
**common_attrs
- }
- )
+ }
+ )
data[ch_path] = d
data[x.format(ch_str)] = xrda(
- da.arange(1, ncols+1, dtype="uint16"),
- dims=("x",),
- attrs={
- "scale_factor": -5.58877772833e-05,
- "add_offset": 0.155619515845,
- }
- )
+ da.arange(1, ncols + 1, dtype="uint16"),
+ dims=("x",),
+ attrs={
+ "scale_factor": -5.58877772833e-05,
+ "add_offset": 0.155619515845,
+ }
+ )
data[y.format(ch_str)] = xrda(
- da.arange(1, nrows+1, dtype="uint16"),
- dims=("y",),
- attrs={
- "scale_factor": -5.58877772833e-05,
- "add_offset": 0.155619515845,
- }
- )
+ da.arange(1, nrows + 1, dtype="uint16"),
+ dims=("y",),
+ attrs={
+ "scale_factor": -5.58877772833e-05,
+ "add_offset": 0.155619515845,
+ }
+ )
data[qual.format(ch_str)] = xrda(
- da.arange(nrows*ncols, dtype="uint8").reshape(nrows, ncols) % 128,
- dims=("y", "x"))
-
+ da.arange(nrows * ncols, dtype="uint8").reshape(nrows, ncols) % 128,
+ dims=("y", "x"))
+ # add dummy data for index map starting from 100
+ data[index_map.format(ch_str)] = xrda(
+ (da.arange(nrows * ncols, dtype="uint16").reshape(nrows, ncols) % 6000) + 100,
+ dims=("y", "x"))
+
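+ # dummy radiance unit conversion coefficient, asserted in test_load_radiance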
+ data[rad_conv_coeff.format(ch_str)] = xrda(1234.56)
data[pos.format(ch_str, "start", "row")] = xrda(0)
data[pos.format(ch_str, "start", "column")] = xrda(0)
data[pos.format(ch_str, "end", "row")] = xrda(nrows)
data[pos.format(ch_str, "end", "column")] = xrda(ncols)
if pat.startswith("ir") or pat.startswith("wv"):
data.update(self._get_test_calib_for_channel_ir(chroot.format(ch_str),
- meas.format(ch_str)))
+ meas.format(ch_str)))
elif pat.startswith("vis") or pat.startswith("nir"):
data.update(self._get_test_calib_for_channel_vis(chroot.format(ch_str),
- meas.format(ch_str)))
+ meas.format(ch_str)))
data[shp.format(ch_str)] = (nrows, ncols)
return data
def _get_test_content_all_channels(self):
chan_patterns = {
- "vis_{:>02d}": (4, 5, 6, 8, 9),
- "nir_{:>02d}": (13, 16, 22),
- "ir_{:>02d}": (38, 87, 97, 105, 123, 133),
- "wv_{:>02d}": (63, 73),
- }
+ "vis_{:>02d}": (4, 5, 6, 8, 9),
+ "nir_{:>02d}": (13, 16, 22),
+ "ir_{:>02d}": (38, 87, 97, 105, 123, 133),
+ "wv_{:>02d}": (63, 73),
+ }
data = {}
for pat in chan_patterns:
for ch_num in chan_patterns[pat]:
@@ -153,16 +161,16 @@ def _get_test_content_areadef(self):
proj = "data/mtg_geos_projection"
attrs = {
- "sweep_angle_axis": "y",
- "perspective_point_height": "35786400.0",
- "semi_major_axis": "6378137.0",
- "longitude_of_projection_origin": "0.0",
- "inverse_flattening": "298.257223563",
- "units": "m"}
+ "sweep_angle_axis": "y",
+ "perspective_point_height": "35786400.0",
+ "semi_major_axis": "6378137.0",
+ "longitude_of_projection_origin": "0.0",
+ "inverse_flattening": "298.257223563",
+ "units": "m"}
data[proj] = xr.DataArray(
- 0,
- dims=(),
- attrs=attrs)
+ 0,
+ dims=(),
+ attrs=attrs)
# also set attributes cached, as this may be how they are accessed with
# the NetCDF4FileHandler
@@ -171,6 +179,23 @@ def _get_test_content_areadef(self):
return data
+ def _get_test_content_aux_data(self):
+ from satpy.readers.fci_l1c_nc import AUX_DATA
+ xrda = xr.DataArray
+ data = {}
+ indices_dim = 6000
+ for key, value in AUX_DATA.items():
+ # skip population of earth_sun_distance as this is already defined for reflectance calculation
+ if key == 'earth_sun_distance':
+ continue
+ data[value] = xrda(da.arange(indices_dim, dtype="float32"), dims=("index",))
+
+ # compute the last data entry to simulate the FCI caching
+ data[list(AUX_DATA.values())[-1]] = data[list(AUX_DATA.values())[-1]].compute()
+
+ data['index'] = xrda(da.arange(indices_dim, dtype="uint16") + 100, dims=("index",))
+ return data
+
def _get_global_attributes(self):
data = {}
attrs = {"platform": "MTI1"}
@@ -191,6 +216,7 @@ def get_test_content(self, filename, filename_info, filetype_info):
D = {}
D.update(self._get_test_content_all_channels())
D.update(self._get_test_content_areadef())
+ D.update(self._get_test_content_aux_data())
D.update(self._get_global_attributes())
return D
@@ -217,18 +243,49 @@ def _get_test_calib_for_channel_vis(self, chroot, meas):
return data
+class FakeNetCDF4FileHandler4(FakeNetCDF4FileHandler2):
+ """Mock bad data for IDPF TO-DO's."""
+
+ def _get_test_calib_for_channel_vis(self, chroot, meas):
+ data = super()._get_test_calib_for_channel_vis(chroot, meas)
+ data["state/celestial/earth_sun_distance"] = xr.DataArray(da.repeat(da.array([30000000]), 6000))
+ return data
+
+ def _get_test_content_all_channels(self):
+ data = super()._get_test_content_all_channels()
+ data['data/vis_04/measured/x'].attrs['scale_factor'] *= -1
+ data['data/vis_04/measured/x'].attrs['scale_factor'] = \
+ np.float32(data['data/vis_04/measured/x'].attrs['scale_factor'])
+ data['data/vis_04/measured/x'].attrs['add_offset'] = \
+ np.float32(data['data/vis_04/measured/x'].attrs['add_offset'])
+ data['data/vis_04/measured/y'].attrs['scale_factor'] = \
+ np.float32(data['data/vis_04/measured/y'].attrs['scale_factor'])
+ data['data/vis_04/measured/y'].attrs['add_offset'] = \
+ np.float32(data['data/vis_04/measured/y'].attrs['add_offset'])
+
+ return data
+
+
@pytest.fixture
def reader_configs():
"""Return reader configs for FCI."""
from satpy._config import config_search_paths
return config_search_paths(
- os.path.join("readers", "fci_l1c_fdhsi.yaml"))
+ os.path.join("readers", "fci_l1c_nc.yaml"))
+
+def _get_reader_with_filehandlers(filenames, reader_configs):
+ from satpy.readers import load_reader
+ reader = load_reader(reader_configs)
+ loadables = reader.select_files_from_pathnames(filenames)
+ reader.create_filehandlers(loadables)
+ return reader
-class TestFCIL1CFDHSIReader:
- """Initialize the unittest TestCase for the FCI L1C FDHSI Reader."""
- yaml_file = "fci_l1c_fdhsi.yaml"
+class TestFCIL1cNCReader:
+ """Initialize the unittest TestCase for the FCI L1c NetCDF Reader."""
+
+ yaml_file = "fci_l1c_nc.yaml"
_alt_handler = FakeNetCDF4FileHandler2
@@ -236,9 +293,9 @@ class TestFCIL1CFDHSIReader:
def fake_handler(self):
"""Wrap NetCDF4 FileHandler with our own fake handler."""
# implementation strongly inspired by test_viirs_l1b.py
- from satpy.readers.fci_l1c_fdhsi import FCIFDHSIFileHandler
+ from satpy.readers.fci_l1c_nc import FCIL1cNCFileHandler
p = mock.patch.object(
- FCIFDHSIFileHandler,
+ FCIL1cNCFileHandler,
"__bases__",
(self._alt_handler,))
with p:
@@ -246,8 +303,8 @@ def fake_handler(self):
yield p
-class TestFCIL1CFDHSIReaderGoodData(TestFCIL1CFDHSIReader):
- """Test FCI L1C FDHSI reader."""
+class TestFCIL1cNCReaderGoodData(TestFCIL1cNCReader):
+ """Test FCI L1c NetCDF reader."""
_alt_handler = FakeNetCDF4FileHandler2
@@ -286,7 +343,6 @@ def test_file_pattern(self, reader_configs):
def test_load_counts(self, reader_configs):
"""Test loading with counts."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
# testing two filenames to test correctly combined
filenames = [
@@ -298,18 +354,16 @@ def test_load_counts(self, reader_configs):
"20170410113934_20170410113942_N__C_0070_0068.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
res = reader.load(
- [make_dataid(name=name, calibration="counts") for name in
- self._chans["solar"] + self._chans["terran"]], pad_data=False)
+ [make_dataid(name=name, calibration="counts") for name in
+ self._chans["solar"] + self._chans["terran"]], pad_data=False)
assert 16 == len(res)
for ch in self._chans["solar"] + self._chans["terran"]:
- assert res[ch].shape == (200*2, 11136)
+ assert res[ch].shape == (200 * 2, 11136)
assert res[ch].dtype == np.uint16
assert res[ch].attrs["calibration"] == "counts"
- assert res[ch].attrs["units"] == "1"
+ assert res[ch].attrs["units"] == "count"
if ch == 'ir_38':
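+ # the fake ir_38 data has a first row of simulated fire pixels (5000 counts)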
numpy.testing.assert_array_equal(res[ch][~0], 1)
numpy.testing.assert_array_equal(res[ch][0], 5000)
@@ -319,7 +373,6 @@ def test_load_counts(self, reader_configs):
def test_load_radiance(self, reader_configs):
"""Test loading with radiance."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
@@ -327,18 +380,17 @@ def test_load_radiance(self, reader_configs):
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
res = reader.load(
- [make_dataid(name=name, calibration="radiance") for name in
- self._chans["solar"] + self._chans["terran"]], pad_data=False)
+ [make_dataid(name=name, calibration="radiance") for name in
+ self._chans["solar"] + self._chans["terran"]], pad_data=False)
assert 16 == len(res)
for ch in self._chans["solar"] + self._chans["terran"]:
assert res[ch].shape == (200, 11136)
assert res[ch].dtype == np.float64
assert res[ch].attrs["calibration"] == "radiance"
- assert res[ch].attrs["units"] == 'mW.m-2.sr-1.(cm-1)-1'
+ assert res[ch].attrs["units"] == 'mW m-2 sr-1 (cm-1)-1'
+ assert res[ch].attrs["radiance_unit_conversion_coefficient"] == 1234.56
if ch == 'ir_38':
numpy.testing.assert_array_equal(res[ch][~0], 15)
numpy.testing.assert_array_equal(res[ch][0], 9700)
@@ -348,7 +400,6 @@ def test_load_radiance(self, reader_configs):
def test_load_reflectance(self, reader_configs):
"""Test loading with reflectance."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
@@ -356,37 +407,33 @@ def test_load_reflectance(self, reader_configs):
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
res = reader.load(
- [make_dataid(name=name, calibration="reflectance") for name in
- self._chans["solar"]], pad_data=False)
+ [make_dataid(name=name, calibration="reflectance") for name in
+ self._chans["solar"]], pad_data=False)
assert 8 == len(res)
for ch in self._chans["solar"]:
assert res[ch].shape == (200, 11136)
assert res[ch].dtype == np.float64
assert res[ch].attrs["calibration"] == "reflectance"
assert res[ch].attrs["units"] == "%"
- numpy.testing.assert_array_equal(res[ch], 100 * 15 * 1 * np.pi / 50)
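+ # expected: reflectance = 100 * radiance * pi * d**2 / irradiance
+ # = 100 * 15 * 1 * pi / 50, with d = 1 AU in the fake data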
+ numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50)
def test_load_bt(self, reader_configs, caplog):
"""Test loading with bt."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
+
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
with caplog.at_level(logging.WARNING):
res = reader.load(
- [make_dataid(name=name, calibration="brightness_temperature") for
- name in self._chans["terran"]], pad_data=False)
+ [make_dataid(name=name, calibration="brightness_temperature") for
+ name in self._chans["terran"]], pad_data=False)
assert caplog.text == ""
for ch in self._chans["terran"]:
assert res[ch].shape == (200, 11136)
@@ -400,33 +447,100 @@ def test_load_bt(self, reader_configs, caplog):
else:
numpy.testing.assert_array_almost_equal(res[ch], 209.68274099)
+ def test_orbital_parameters_attr(self, reader_configs):
+ """Test the orbital parameter attribute."""
+ from satpy.tests.utils import make_dataid
+
+ filenames = [
+ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
+ "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
+ "20170410113925_20170410113934_N__C_0070_0067.nc",
+ ]
+
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
+ res = reader.load(
+ [make_dataid(name=name) for name in
+ self._chans["solar"] + self._chans["terran"]], pad_data=False)
+
+ for ch in self._chans["solar"] + self._chans["terran"]:
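+ # the actual satellite position entries are means over the arange(6000) aux data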
+ assert res[ch].attrs["orbital_parameters"] == {
+ 'satellite_actual_longitude': np.mean(np.arange(6000)),
+ 'satellite_actual_latitude': np.mean(np.arange(6000)),
+ 'satellite_actual_altitude': np.mean(np.arange(6000)),
+ 'satellite_nominal_longitude': 0.0,
+ 'satellite_nominal_latitude': 0,
+ 'satellite_nominal_altitude': 35786400.0,
+ 'projection_longitude': 0.0,
+ 'projection_latitude': 0,
+ 'projection_altitude': 35786400.0,
+ }
+
+ def test_load_index_map(self, reader_configs):
+ """Test loading of index_map."""
+ filenames = [
+ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
+ "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
+ "20170410113925_20170410113934_N__C_0070_0067.nc"
+ ]
+
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
+ res = reader.load(
+ [name + '_index_map' for name in
+ self._chans["solar"] + self._chans["terran"]], pad_data=False)
+ assert 16 == len(res)
+ for ch in self._chans["solar"] + self._chans["terran"]:
+ assert res[ch + '_index_map'].shape == (200, 11136)
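+ # pixel (1, 1) has flat index 1 * 11136 + 1 = 11137; (11137 % 6000) + 100 = 5237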
+ numpy.testing.assert_array_equal(res[ch + '_index_map'][1, 1], 5237)
+
+ def test_load_aux_data(self, reader_configs):
+ """Test loading of auxiliary data."""
+ from satpy.readers.fci_l1c_nc import AUX_DATA
+
+ filenames = [
+ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
+ "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
+ "20170410113925_20170410113934_N__C_0070_0067.nc"
+ ]
+
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
+ res = reader.load(['vis_04_' + key for key in AUX_DATA.keys()],
+ pad_data=False)
+ for aux in ['vis_04_' + key for key in AUX_DATA.keys()]:
+ assert res[aux].shape == (200, 11136)
+ if aux == 'vis_04_earth_sun_distance':
+ numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7)
+ else:
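+ # index_map at (1, 1) is 5237; subtracting the 100 offset selects aux value 5137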
+ numpy.testing.assert_array_equal(res[aux][1, 1], 5137)
+
def test_load_composite(self):
"""Test that composites are loadable."""
- # when dedicated composites for FCI FDHSI are implemented in satpy,
+ # when dedicated composites for FCI are implemented in satpy,
# this method should probably move to a dedicated class and module
# in the tests.compositor_tests package
- from satpy.composites.config_loader import CompositorLoader
- cl = CompositorLoader()
- (comps, mods) = cl.load_compositors(["fci"])
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ comps, mods = load_compositor_configs_for_sensors(['fci'])
assert len(comps["fci"]) > 0
assert len(mods["fci"]) > 0
def test_load_quality_only(self, reader_configs):
"""Test that loading quality only works."""
- from satpy.readers import load_reader
-
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
- res = reader.load(["ir_123_pixel_quality"], pad_data=False)
- assert res["ir_123_pixel_quality"].attrs["name"] == "ir_123_pixel_quality"
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
+ res = reader.load(
+ [name + '_pixel_quality' for name in
+ self._chans["solar"] + self._chans["terran"]], pad_data=False)
+ assert 16 == len(res)
+ for ch in self._chans["solar"] + self._chans["terran"]:
+ assert res[ch + '_pixel_quality'].shape == (200, 11136)
+ numpy.testing.assert_array_equal(res[ch + '_pixel_quality'][1, 1], 1)
+ assert res[ch + '_pixel_quality'].attrs["name"] == ch + '_pixel_quality'
def test_platform_name(self, reader_configs):
"""Test that platform name is exposed.
@@ -434,58 +548,43 @@ def test_platform_name(self, reader_configs):
Test that the FCI reader exposes the platform name. Corresponds
to GH issue 1014.
"""
- from satpy.readers import load_reader
-
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
res = reader.load(["ir_123"], pad_data=False)
assert res["ir_123"].attrs["platform_name"] == "MTG-I1"
def test_excs(self, reader_configs):
"""Test that exceptions are raised where expected."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
-
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- fhs = reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
with pytest.raises(ValueError):
- fhs["fci_l1c_fdhsi"][0].get_dataset(make_dataid(name="invalid"), {})
+ reader.file_handlers["fci_l1c_fdhsi"][0].get_dataset(make_dataid(name="invalid"), {})
with pytest.raises(ValueError):
- fhs["fci_l1c_fdhsi"][0]._get_dataset_quality(make_dataid(name="invalid"),
- {})
- with pytest.raises(ValueError):
- fhs["fci_l1c_fdhsi"][0].get_dataset(
- make_dataid(name="ir_123", calibration="unknown"),
- {"units": "unknown"})
+ reader.file_handlers["fci_l1c_fdhsi"][0].get_dataset(
+ make_dataid(name="ir_123", calibration="unknown"),
+ {"units": "unknown"})
def test_area_definition_computation(self, reader_configs):
"""Test that the geolocation computation is correct."""
- from satpy.readers import load_reader
-
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
"CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
res = reader.load(['ir_105', 'vis_06'], pad_data=False)
# test that area_ids are harmonisation-conform <platform>_<instrument>_<service>_<resolution>
@@ -507,15 +606,14 @@ def test_area_definition_computation(self, reader_configs):
assert area_def.crs.ellipsoid.is_semi_minor_computed
-class TestFCIL1CFDHSIReaderBadData(TestFCIL1CFDHSIReader):
- """Test the FCI L1C FDHSI Reader for bad data input."""
+class TestFCIL1cNCReaderBadData(TestFCIL1cNCReader):
+ """Test the FCI L1c NetCDF Reader for bad data input."""
_alt_handler = FakeNetCDF4FileHandler3
def test_handling_bad_data_ir(self, reader_configs, caplog):
"""Test handling of bad IR data."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
@@ -523,19 +621,16 @@ def test_handling_bad_data_ir(self, reader_configs, caplog):
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
with caplog.at_level("ERROR"):
reader.load([make_dataid(
- name="ir_123",
- calibration="brightness_temperature")], pad_data=False)
+ name="ir_123",
+ calibration="brightness_temperature")], pad_data=False)
assert "cannot produce brightness temperature" in caplog.text
def test_handling_bad_data_vis(self, reader_configs, caplog):
"""Test handling of bad VIS data."""
from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
filenames = [
"W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
@@ -543,11 +638,47 @@ def test_handling_bad_data_vis(self, reader_configs, caplog):
"20170410113925_20170410113934_N__C_0070_0067.nc",
]
- reader = load_reader(reader_configs)
- loadables = reader.select_files_from_pathnames(filenames)
- reader.create_filehandlers(loadables)
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
with caplog.at_level("ERROR"):
reader.load([make_dataid(
- name="vis_04",
- calibration="reflectance")], pad_data=False)
+ name="vis_04",
+ calibration="reflectance")], pad_data=False)
assert "cannot produce reflectance" in caplog.text
+
+
+class TestFCIL1cNCReaderBadDataFromIDPF(TestFCIL1cNCReader):
+ """Test the FCI L1c NetCDF Reader for bad data input."""
+
+ _alt_handler = FakeNetCDF4FileHandler4
+
+ def test_handling_bad_earthsun_distance(self, reader_configs, caplog):
+ """Test handling of bad earth-sun distance data."""
+ from satpy.tests.utils import make_dataid
+
+ filenames = [
+ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
+ "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
+ "20170410113925_20170410113934_N__C_0070_0067.nc",
+ ]
+
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
+
+ res = reader.load([make_dataid(name=["vis_04"], calibration="reflectance")], pad_data=False)
+ numpy.testing.assert_array_almost_equal(res["vis_04"], 100 * 15 * 1 * np.pi / 50)
+
+ def test_bad_xy_coords(self, reader_configs):
+ """Test that the geolocation computation is correct."""
+ filenames = [
+ "W_XX-EUMETSAT-Darmstadt,IMG+SAT,MTI1+FCI-1C-RRAD-FDHSI-FD--"
+ "CHK-BODY--L2P-NC4E_C_EUMT_20170410114434_GTT_DEV_"
+ "20170410113925_20170410113934_N__C_0070_0067.nc",
+ ]
+
+ reader = _get_reader_with_filehandlers(filenames, reader_configs)
+ res = reader.load(['vis_04'], pad_data=False)
+
+ area_def = res['vis_04'].attrs['area']
+ # test area extents computation
+ np.testing.assert_array_almost_equal(np.array(area_def.area_extent),
+ np.array([-5568062.270889, 5168057.806632,
+ 16704186.298937, 5568062.270889]))
diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py
index 2aae66f250..9ebbdb32e7 100644
--- a/satpy/tests/reader_tests/test_fci_l2_nc.py
+++ b/satpy/tests/reader_tests/test_fci_l2_nc.py
@@ -18,7 +18,6 @@
"""The fci_cld_l2_nc reader tests package."""
-import datetime
import os
import unittest
import uuid
@@ -26,8 +25,32 @@
from unittest import mock
import numpy as np
+import pytest
from netCDF4 import Dataset
-from satpy.readers.fci_l2_nc import FciL2NCFileHandler, FciL2NCSegmentFileHandler, PRODUCT_DATA_DURATION_MINUTES
+from pyresample import geometry
+
+from satpy.readers.fci_l2_nc import FciL2NCFileHandler, FciL2NCSegmentFileHandler
+from satpy.tests.utils import make_dataid
+
+AREA_DEF = geometry.AreaDefinition(
+ 'mtg_fci_fdss_2km',
+ 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution',
+ "",
+ {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'},
+ 5568,
+ 5568,
+ (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942)
+)
+
+SEG_AREA_DEF = geometry.AreaDefinition(
+ 'mtg_fci_fdss_32km',
+ 'MTG FCI Full Disk Scanning Service area definition with 32 km resolution',
+ "",
+ {'h': 35786400., 'lon_0': 0.0, 'ellps': 'WGS84', 'proj': 'geos', 'units': 'm'},
+ 348,
+ 348,
+ (-5567999.9942, 5567999.9942, 5567999.9942, -5567999.9942)
+)
class TestFciL2NCFileHandler(unittest.TestCase):
@@ -47,8 +70,6 @@ def setUp(self):
# add global attributes
nc.data_source = 'test_data_source'
nc.platform = 'test_platform'
- nc.time_coverage_start = '20170920173040'
- nc.time_coverage_end = '20170920174117'
# Add datasets
x = nc.createVariable('x', np.float32, dimensions=('number_of_columns',))
@@ -56,9 +77,12 @@ def setUp(self):
x[:] = np.arange(10)
y = nc.createVariable('y', np.float32, dimensions=('number_of_rows',))
- x.standard_name = 'projection_y_coordinate'
+ y.standard_name = 'projection_y_coordinate'
y[:] = np.arange(100)
+ s = nc.createVariable('product_quality', np.int8)
+ s[:] = 99.
+
one_layer_dataset = nc.createVariable('test_one_layer', np.float32,
dimensions=('number_of_rows', 'number_of_columns'))
one_layer_dataset[:] = np.ones((100, 10))
@@ -73,124 +97,124 @@ def setUp(self):
two_layers_dataset[1, :, :] = 2 * np.ones((100, 10))
mtg_geos_projection = nc.createVariable('mtg_geos_projection', int, dimensions=())
- mtg_geos_projection.longitude_of_projection_origin = 10.0
+ mtg_geos_projection.longitude_of_projection_origin = 0.0
mtg_geos_projection.semi_major_axis = 6378137.
- mtg_geos_projection.semi_minor_axis = 6356752.
+ mtg_geos_projection.inverse_flattening = 298.257223563
mtg_geos_projection.perspective_point_height = 35786400.
- self.reader = FciL2NCFileHandler(
- filename=self.test_file,
- filename_info={
- 'creation_time': datetime.datetime(year=2017, month=9, day=20,
- hour=12, minute=30, second=30),
- },
- filetype_info={}
- )
+ self.fh = FciL2NCFileHandler(filename=self.test_file, filename_info={}, filetype_info={})
def tearDown(self):
"""Remove the previously created test file."""
- # First delete the reader, forcing the file to be closed if still open
- del self.reader
+ # First delete the file handler, forcing the file to be closed if still open
+ del self.fh
# Then we can safely remove the file from the system
with suppress(OSError):
os.remove(self.test_file)
def test_all_basic(self):
"""Test all basic functionalities."""
- self.assertEqual(PRODUCT_DATA_DURATION_MINUTES, 20)
-
- self.assertEqual(self.reader._start_time,
- datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=30, second=40))
+ self.assertEqual(self.fh.spacecraft_name, 'test_platform')
+ self.assertEqual(self.fh.sensor_name, 'test_data_source')
+ self.assertEqual(self.fh.ssp_lon, 0.0)
- self.assertEqual(self.reader._end_time,
- datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=41, second=17))
-
- self.assertEqual(self.reader._spacecraft_name, 'test_platform')
- self.assertEqual(self.reader._sensor_name, 'test_data_source')
- self.assertEqual(self.reader.ssp_lon, 10.0)
-
- global_attributes = self.reader._get_global_attributes()
+ global_attributes = self.fh._get_global_attributes()
expected_global_attributes = {
'filename': self.test_file,
- 'start_time': datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=30, second=40),
- 'end_time': datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=41, second=17),
'spacecraft_name': 'test_platform',
- 'ssp_lon': 10.0,
+ 'ssp_lon': 0.0,
'sensor': 'test_data_source',
- 'creation_time': datetime.datetime(year=2017, month=9, day=20,
- hour=12, minute=30, second=30),
'platform_name': 'test_platform'
}
self.assertEqual(global_attributes, expected_global_attributes)
- @mock.patch('satpy.readers.fci_l2_nc.get_area_definition')
+ @mock.patch('satpy.readers.fci_l2_nc.geometry.AreaDefinition')
@mock.patch('satpy.readers.fci_l2_nc.make_ext')
def test_area_definition(self, me_, gad_):
"""Test the area definition computation."""
- self.reader._compute_area_def()
+ self.fh._compute_area_def(make_dataid(name='test_area_def', resolution=2000))
# Asserts that the make_ext function was called with the correct arguments
me_.assert_called_once()
- name, args, kwargs = me_.mock_calls[0]
- self.assertTrue(np.allclose(args[0], 0.0))
- self.assertTrue(np.allclose(args[1], 515.6620))
- self.assertTrue(np.allclose(args[2], 0.0))
- self.assertTrue(np.allclose(args[3], 5672.28217))
- self.assertTrue(np.allclose(args[4], 35786400.))
-
- p_dict = {
- 'nlines': 100,
- 'ncols': 10,
- 'ssp_lon': 10.0,
- 'a': 6378137.,
- 'b': 6356752.,
- 'h': 35786400.,
- 'a_name': 'FCI Area',
- 'a_desc': 'Area for FCI instrument',
- 'p_id': 'geos'
- }
+ args, kwargs = me_.call_args
+ np.testing.assert_allclose(args, [-0.0, -515.6620, 5672.28217, 0.0, 35786400.])
+
+ proj_dict = {'a': 6378137.,
+ 'lon_0': 0.0,
+ 'h': 35786400,
+ "rf": 298.257223563,
+ 'proj': 'geos',
+ 'units': 'm',
+ 'sweep': 'y'}
# Asserts that the get_area_definition function was called with the correct arguments
gad_.assert_called_once()
- name, args, kwargs = gad_.mock_calls[0]
- self.assertEqual(args[0], p_dict)
- # The second argument must be the return result of the make_ext function
- self.assertEqual(args[1]._extract_mock_name(), 'make_ext()')
+ args, kwargs = gad_.call_args
+ self.assertEqual(args[0], 'mtg_fci_fdss_2km')
+ self.assertEqual(args[1], 'MTG FCI Full Disk Scanning Service area definition with 2 km resolution')
+ self.assertEqual(args[2], '')
+ self.assertEqual(args[3], proj_dict)
+ self.assertEqual(args[4], 10)
+ self.assertEqual(args[5], 100)
def test_dataset(self):
- """Test the execution of the get_dataset function."""
- # Checks the correct execution of the get_dataset function with a valid file_key
- dataset = self.reader.get_dataset(None,
- {'file_key': 'test_one_layer',
- 'fill_value': -999, 'mask_value': 0.,
- 'file_type': 'test_file_type'})
-
- self.assertTrue(np.allclose(dataset.values, np.ones((100, 10))))
+ """Test the correct execution of the get_dataset function with a valid file_key."""
+ dataset = self.fh.get_dataset(make_dataid(name='test_one_layer', resolution=2000),
+ {'name': 'test_one_layer',
+ 'file_key': 'test_one_layer',
+ 'fill_value': -999,
+ 'file_type': 'test_file_type'})
+
+ np.testing.assert_allclose(dataset.values, np.ones((100, 10)))
self.assertEqual(dataset.attrs['test_attr'], 'attr')
self.assertEqual(dataset.attrs['units'], 'test_units')
self.assertEqual(dataset.attrs['fill_value'], -999)
- # Checks the correct execution of the get_dataset function with a valid file_key & layer
- dataset = self.reader.get_dataset(None,
- {'file_key': 'test_two_layers', 'layer': 1,
- 'fill_value': -999, 'mask_value': 0,
- 'file_type': 'test_file_type'})
- self.assertTrue(np.allclose(dataset.values, 2 * np.ones((100, 10))))
+ def test_dataset_with_layer(self):
+ """Check the correct execution of the get_dataset function with a valid file_key & layer."""
+ dataset = self.fh.get_dataset(make_dataid(name='test_two_layers', resolution=2000),
+ {'name': 'test_two_layers',
+ 'file_key': 'test_two_layers', 'layer': 1,
+ 'fill_value': -999,
+ 'file_type': 'test_file_type'})
+ np.testing.assert_allclose(dataset.values, 2 * np.ones((100, 10)))
self.assertEqual(dataset.attrs['units'], None)
self.assertEqual(dataset.attrs['spacecraft_name'], 'test_platform')
- # Checks the correct execution of the get_dataset function with an invalid file_key
- invalid_dataset = self.reader.get_dataset(None,
- {'file_key': 'test_invalid',
- 'fill_value': -999, 'mask_value': 0,
- 'file_type': 'test_file_type'})
- # Checks that the function returns None
+ def test_dataset_with_invalid_filekey(self):
+ """Test the correct execution of the get_dataset function with an invalid file_key."""
+ invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=2000),
+ {'name': 'test_invalid',
+ 'file_key': 'test_invalid',
+ 'fill_value': -999,
+ 'file_type': 'test_file_type'})
self.assertEqual(invalid_dataset, None)
+ def test_dataset_with_total_cot(self):
+ """Test the correct execution of the get_dataset function for total COT (add contributions from two layers)."""
+ dataset = self.fh.get_dataset(make_dataid(name='retrieved_cloud_optical_thickness', resolution=2000),
+ {'name': 'retrieved_cloud_optical_thickness',
+ 'file_key': 'test_two_layers',
+ 'fill_value': -999,
+ 'file_type': 'test_file_type'})
+ # Checks that the two layers are summed in linear space: log10(10**1 + 10**2)
+ expected_sum = np.empty((100, 10))
+ expected_sum[:] = np.log10(10**2 + 10**1)
+ np.testing.assert_allclose(dataset.values, expected_sum)
+
+ def test_dataset_with_scalar(self):
+ """Test the execution of the get_dataset function for scalar values."""
+ # Checks returned scalar value
+ dataset = self.fh.get_dataset(make_dataid(name='test_scalar'),
+ {'name': 'product_quality',
+ 'file_key': 'product_quality',
+ 'file_type': 'test_file_type'})
+ self.assertEqual(dataset.values, 99.)
+
+ # Checks that no AreaDefinition is implemented for scalar values
+ with pytest.raises(NotImplementedError):
+ self.fh.get_area_def(None)
+
class TestFciL2NCSegmentFileHandler(unittest.TestCase):
"""Test the FciL2NCSegmentFileHandler reader."""
@@ -201,25 +225,26 @@ def setUp(self):
self.seg_test_file = str(uuid.uuid4()) + ".nc"
with Dataset(self.seg_test_file, 'w') as nc:
# Create dimensions
- nc.createDimension('number_of_FoR_cols', 10)
- nc.createDimension('number_of_FoR_rows', 100)
+ nc.createDimension('number_of_FoR_cols', 348)
+ nc.createDimension('number_of_FoR_rows', 348)
nc.createDimension('number_of_channels', 8)
nc.createDimension('number_of_categories', 6)
# add global attributes
nc.data_source = 'test_fci_data_source'
nc.platform = 'test_fci_platform'
- nc.time_coverage_start = '20170920173040'
- nc.time_coverage_end = '20170920174117'
# Add datasets
x = nc.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',))
x.standard_name = 'projection_x_coordinate'
- x[:] = np.arange(10)
+ x[:] = np.arange(348)
y = nc.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',))
- x.standard_name = 'projection_y_coordinate'
- y[:] = np.arange(100)
+ y.standard_name = 'projection_y_coordinate'
+ y[:] = np.arange(348)
+
+ s = nc.createVariable('product_quality', np.int8)
+ s[:] = 99.
chans = nc.createVariable('channels', np.float32, dimensions=('number_of_channels',))
chans.standard_name = 'fci_channels'
@@ -232,152 +257,180 @@ def setUp(self):
test_dataset = nc.createVariable('test_values', np.float32,
dimensions=('number_of_FoR_rows', 'number_of_FoR_cols',
'number_of_channels', 'number_of_categories'))
- test_dataset[:] = np.ones((100, 10, 8, 6))
+
+ test_dataset[:] = self._get_unique_array(range(8), range(6))
test_dataset.test_attr = 'attr'
test_dataset.units = 'test_units'
- self.segment_reader = FciL2NCSegmentFileHandler(
- filename=self.seg_test_file,
- filename_info={
- 'creation_time': datetime.datetime(year=2017, month=9, day=20,
- hour=12, minute=30, second=30),
- },
- filetype_info={}
- )
-
def tearDown(self):
"""Remove the previously created test file."""
- # First delete the reader, forcing the file to be closed if still open
- del self.segment_reader
+ # First delete the fh, forcing the file to be closed if still open
+ del self.fh
# Then can safely remove it from the system
with suppress(OSError):
os.remove(self.seg_test_file)
def test_all_basic(self):
"""Test all basic functionalities."""
- self.assertEqual(PRODUCT_DATA_DURATION_MINUTES, 20)
-
- self.assertEqual(self.segment_reader._start_time,
- datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=30, second=40))
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
- self.assertEqual(self.segment_reader._end_time,
- datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=41, second=17))
+ assert self.fh.spacecraft_name == 'test_fci_platform'
+ assert self.fh.sensor_name == 'test_fci_data_source'
+ assert self.fh.ssp_lon == 0.0
- self.assertEqual(self.segment_reader._spacecraft_name, 'test_fci_platform')
- self.assertEqual(self.segment_reader._sensor_name, 'test_fci_data_source')
- self.assertEqual(self.segment_reader.ssp_lon, 0.0)
-
- global_attributes = self.segment_reader._get_global_attributes()
+ global_attributes = self.fh._get_global_attributes()
expected_global_attributes = {
'filename': self.seg_test_file,
- 'start_time': datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=30, second=40),
- 'end_time': datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=41, second=17),
'spacecraft_name': 'test_fci_platform',
'ssp_lon': 0.0,
'sensor': 'test_fci_data_source',
- 'creation_time': datetime.datetime(year=2017, month=9, day=20,
- hour=12, minute=30, second=30),
'platform_name': 'test_fci_platform'
}
self.assertEqual(global_attributes, expected_global_attributes)
def test_dataset(self):
- """Test the execution of the get_dataset function."""
+ """Test the correct execution of the get_dataset function with valid file_key."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+
# Checks the correct execution of the get_dataset function with a valid file_key
- dataset = self.segment_reader.get_dataset(None,
- {'file_key': 'test_values',
- 'fill_value': -999, 'mask_value': 0, })
- self.assertTrue(np.allclose(dataset.values, np.ones((100, 10, 8, 6))))
+ dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
+ {'name': 'test_values',
+ 'file_key': 'test_values',
+ 'fill_value': -999, })
+ expected_dataset = self._get_unique_array(range(8), range(6))
+ np.testing.assert_allclose(dataset.values, expected_dataset)
self.assertEqual(dataset.attrs['test_attr'], 'attr')
self.assertEqual(dataset.attrs['units'], 'test_units')
self.assertEqual(dataset.attrs['fill_value'], -999)
+ # Checks that no AreaDefinition is implemented
+ with pytest.raises(NotImplementedError):
+ self.fh.get_area_def(None)
+
+ def test_dataset_with_invalid_filekey(self):
+ """Test the correct execution of the get_dataset function with an invalid file_key."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+
# Checks the correct execution of the get_dataset function with an invalid file_key
- invalid_dataset = self.segment_reader.get_dataset(None,
- {'file_key': 'test_invalid',
- 'fill_value': -999, 'mask_value': 0})
+ invalid_dataset = self.fh.get_dataset(make_dataid(name='test_invalid', resolution=32000),
+ {'name': 'test_invalid',
+ 'file_key': 'test_invalid',
+ 'fill_value': -999, })
# Checks that the function returns None
self.assertEqual(invalid_dataset, None)
+ def test_dataset_with_adef(self):
+ """Test the correct execution of the get_dataset function with `with_area_definition=True`."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={},
+ with_area_definition=True)
-class TestFciL2NCErrorFileHandler(unittest.TestCase):
- """Test the FciL2NCFileHandler reader."""
-
- def setUp(self):
- """Set up the test by creating a test file and opening it with the reader."""
- # Easiest way to test the reader is to create a test netCDF file on the fly
- self.test_error_file = str(uuid.uuid4()) + ".nc"
- with Dataset(self.test_error_file, 'w') as nc_err:
- # Create dimensions
- nc_err.createDimension('number_of_FoR_cols', 10)
- nc_err.createDimension('number_of_FoR_rows', 100)
- nc_err.createDimension('number_of_channels', 8)
- nc_err.createDimension('number_of_categories', 6)
- # add erroneous global attributes
- nc_err.data_source = 'test_fci_data_source' # Error in key name
- nc_err.platform_err = 'test_fci_platform' # Error in key name
- nc_err.time_coverage_start = '2017092017304000' # Error in time format
- nc_err.time_coverage_end_err = '20170920174117' # Error in key name
-
- # Add datasets
- x = nc_err.createVariable('x', np.float32, dimensions=('number_of_FoR_cols',))
- x.standard_name = 'projection_x_coordinate'
- x[:] = np.arange(10)
-
- y = nc_err.createVariable('y', np.float32, dimensions=('number_of_FoR_rows',))
- x.standard_name = 'projection_y_coordinate'
- y[:] = np.arange(100)
-
- chans = nc_err.createVariable('channels', np.float32, dimensions=('number_of_channels',))
- chans.standard_name = 'fci_channels'
- chans[:] = np.arange(8)
-
- cats = nc_err.createVariable('categories', np.float32, dimensions=('number_of_categories',))
- cats.standard_name = 'product_categories'
- cats[:] = np.arange(6)
-
- test_dataset = nc_err.createVariable('test_values', np.float32,
- dimensions=('number_of_FoR_rows', 'number_of_FoR_cols',
- 'number_of_channels', 'number_of_categories'))
- test_dataset[:] = np.ones((100, 10, 8, 6))
- test_dataset.test_attr = 'attr'
- test_dataset.units = 'test_units'
-
- self.error_reader = FciL2NCSegmentFileHandler(
- filename=self.test_error_file,
- filename_info={
- 'creation_time': datetime.datetime(year=2017, month=9, day=20,
- hour=12, minute=30, second=30),
- },
- filetype_info={}
- )
-
- def tearDown(self):
- """Remove the previously created test file."""
- # First delete the reader, forcing the file to be closed if still open
- del self.error_reader
- # Then can safely remove it from the system
- with suppress(OSError):
- os.remove(self.test_error_file)
-
- def test_errors(self):
- """Test that certain properties cause errors."""
- self.assertRaises(TypeError, self.error_reader._start_time,
- datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=30, second=40))
-
- self.assertRaises(TypeError, self.error_reader._end_time,
- datetime.datetime(year=2017, month=9, day=20,
- hour=17, minute=41, second=17))
-
- self.assertRaises(TypeError, self.error_reader._spacecraft_name)
+ # Checks the correct execution of the get_dataset function with a valid file_key
+ dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
+ {'name': 'test_values',
+ 'file_key': 'test_values',
+ 'fill_value': -999,
+ 'coordinates': ('test_lon', 'test_lat'), })
+ expected_dataset = self._get_unique_array(range(8), range(6))
+ np.testing.assert_allclose(dataset.values, expected_dataset)
+ self.assertEqual(dataset.attrs['test_attr'], 'attr')
+ self.assertEqual(dataset.attrs['units'], 'test_units')
+ self.assertEqual(dataset.attrs['fill_value'], -999)
- self.assertRaises(TypeError, self.error_reader._sensor_name)
+ # Checks returned AreaDefinition against reference
+ adef = self.fh.get_area_def(None)
+ self.assertEqual(adef, SEG_AREA_DEF)
+
+    def test_dataset_with_adef_and_wrong_dims(self):
+ """Test the correct execution of the get_dataset function with dims that don't match expected AreaDefinition."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={},
+ with_area_definition=True)
+ with pytest.raises(NotImplementedError):
+ self.fh.get_dataset(make_dataid(name='test_wrong_dims', resolution=6000),
+ {'name': 'test_wrong_dims', 'file_key': 'test_values', 'fill_value': -999}
+ )
+
+ def test_dataset_with_scalar(self):
+ """Test the execution of the get_dataset function for scalar values."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+ # Checks returned scalar value
+ dataset = self.fh.get_dataset(make_dataid(name='test_scalar'),
+ {'name': 'product_quality',
+ 'file_key': 'product_quality',
+ 'file_type': 'test_file_type'})
+ self.assertEqual(dataset.values, 99.)
+
+        # Checks that get_area_def raises NotImplementedError for scalar values
+ with pytest.raises(NotImplementedError):
+ self.fh.get_area_def(None)
+
+ def test_dataset_slicing_catid(self):
+ """Test the correct execution of the _slice_dataset function with 'category_id' set."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+
+ dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
+ {'name': 'test_values',
+ 'file_key': 'test_values',
+ 'fill_value': -999,
+ 'category_id': 5})
+ expected_dataset = self._get_unique_array(range(8), 5)
+ np.testing.assert_allclose(dataset.values, expected_dataset)
+
+ def test_dataset_slicing_chid_catid(self):
+ """Test the correct execution of the _slice_dataset function with 'channel_id' and 'category_id' set."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+
+ dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
+ {'name': 'test_values',
+ 'file_key': 'test_values',
+ 'fill_value': -999,
+ 'channel_id': 0, 'category_id': 1})
+ expected_dataset = self._get_unique_array(0, 1)
+ np.testing.assert_allclose(dataset.values, expected_dataset)
+
+ def test_dataset_slicing_visid_catid(self):
+ """Test the correct execution of the _slice_dataset function with 'vis_channel_id' and 'category_id' set."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+
+ self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_vis_channels'})
+ dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
+ {'name': 'test_values',
+ 'file_key': 'test_values',
+ 'fill_value': -999,
+ 'vis_channel_id': 3, 'category_id': 3})
+ expected_dataset = self._get_unique_array(3, 3)
+ np.testing.assert_allclose(dataset.values, expected_dataset)
+
+ def test_dataset_slicing_irid(self):
+ """Test the correct execution of the _slice_dataset function with 'ir_channel_id' set."""
+ self.fh = FciL2NCSegmentFileHandler(filename=self.seg_test_file, filename_info={}, filetype_info={})
+
+ self.fh.nc = self.fh.nc.rename_dims({'number_of_channels': 'number_of_ir_channels'})
+ dataset = self.fh.get_dataset(make_dataid(name='test_values', resolution=32000),
+ {'name': 'test_values',
+ 'file_key': 'test_values',
+ 'fill_value': -999,
+ 'ir_channel_id': 4})
+ expected_dataset = self._get_unique_array(4, range(6))
+ np.testing.assert_allclose(dataset.values, expected_dataset)
+
+ @staticmethod
+ def _get_unique_array(iarr, jarr):
+ if not hasattr(iarr, '__iter__'):
+ iarr = [iarr]
+
+ if not hasattr(jarr, '__iter__'):
+ jarr = [jarr]
+
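+        # Fill each (channel i, category j) plane with the unique value i*10 + j,
+        # so wrong slicing shows up as wrong values rather than just wrong shapes.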
+ array = np.zeros((348, 348, 8, 6))
+ for i in iarr:
+ for j in jarr:
+ array[:, :, i, j] = (i * 10) + j
+
+ array = array[:, :, list(iarr), :]
+ array = array[:, :, :, list(jarr)]
+
+ return np.squeeze(array)
class TestFciL2NCReadingByteData(unittest.TestCase):
@@ -392,6 +445,10 @@ def setUp(self):
nc_byte.createDimension('number_of_columns', 1)
nc_byte.createDimension('number_of_rows', 1)
+ # add global attributes
+ nc_byte.data_source = 'test_data_source'
+ nc_byte.platform = 'test_platform'
+
# Add datasets
x = nc_byte.createVariable('x', np.float32, dimensions=('number_of_columns',))
x.standard_name = 'projection_x_coordinate'
@@ -402,9 +459,9 @@ def setUp(self):
y[:] = np.arange(1)
mtg_geos_projection = nc_byte.createVariable('mtg_geos_projection', int, dimensions=())
- mtg_geos_projection.longitude_of_projection_origin = 10.0
+ mtg_geos_projection.longitude_of_projection_origin = 0.0
mtg_geos_projection.semi_major_axis = 6378137.
- mtg_geos_projection.semi_minor_axis = 6356752.
+ mtg_geos_projection.inverse_flattening = 298.257223563
mtg_geos_projection.perspective_point_height = 35786400.
test_dataset = nc_byte.createVariable('cloud_mask_test_flag', np.float32,
@@ -415,16 +472,13 @@ def setUp(self):
self.byte_reader = FciL2NCFileHandler(
filename=self.test_byte_file,
- filename_info={
- 'creation_time': datetime.datetime(year=2017, month=9, day=20,
- hour=12, minute=30, second=30),
- },
+ filename_info={},
filetype_info={}
)
def tearDown(self):
"""Remove the previously created test file."""
- # First delete the reader, forcing the file to be closed if still open
+ # First delete the file handler, forcing the file to be closed if still open
del self.byte_reader
# Then can safely remove it from the system
with suppress(OSError):
@@ -433,9 +487,10 @@ def tearDown(self):
def test_byte_extraction(self):
"""Test the execution of the get_dataset function."""
# Value of 1 is expected to be returned for this test
- dataset = self.byte_reader.get_dataset(None,
- {'file_key': 'cloud_mask_test_flag',
- 'fill_value': -999, 'mask_value': 0.,
+ dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000),
+ {'name': 'cloud_mask_test_flag',
+ 'file_key': 'cloud_mask_test_flag',
+ 'fill_value': -999,
'file_type': 'nc_fci_test_clm',
'extract_byte': 1,
})
@@ -443,8 +498,9 @@ def test_byte_extraction(self):
self.assertEqual(dataset.values, 1)
        # Value of 0 is expected to be returned for this test
- dataset = self.byte_reader.get_dataset(None,
- {'file_key': 'cloud_mask_test_flag',
+ dataset = self.byte_reader.get_dataset(make_dataid(name='cloud_mask_test_flag', resolution=2000),
+ {'name': 'cloud_mask_test_flag',
+ 'file_key': 'cloud_mask_test_flag',
'fill_value': -999, 'mask_value': 0.,
'file_type': 'nc_fci_test_clm',
'extract_byte': 23,
diff --git a/satpy/tests/reader_tests/test_fy4_base.py b/satpy/tests/reader_tests/test_fy4_base.py
new file mode 100644
index 0000000000..7b69bee498
--- /dev/null
+++ b/satpy/tests/reader_tests/test_fy4_base.py
@@ -0,0 +1,60 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The fy4_base reader tests package."""
+
+from unittest import mock
+
+import pytest
+
+from satpy.readers.fy4_base import FY4Base
+from satpy.tests.reader_tests.test_agri_l1 import FakeHDF5FileHandler2
+
+
+class Test_FY4Base:
+ """Tests for the FengYun4 base class for the components missed by AGRI/GHI tests."""
+
+ def setup(self):
+ """Initialise the tests."""
+ self.p = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,))
+ self.fake_handler = self.p.start()
+ self.p.is_local = True
+
+ self.file_type = {'file_type': 'agri_l1_0500m'}
+
+ def teardown(self):
+ """Stop wrapping the HDF5 file handler."""
+ self.p.stop()
+
+ def test_badsensor(self):
+ """Test case where we pass a bad sensor name, must be GHI or AGRI."""
+ fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'FCI'}, self.file_type)
+ with pytest.raises(ValueError):
+ fy4.calibrate_to_reflectance(None, None, None)
+ with pytest.raises(ValueError):
+ fy4.calibrate_to_bt(None, None, None)
+
+ def test_badcalibration(self):
+ """Test case where we pass a bad calibration type, radiance is not supported."""
+ fy4 = FY4Base(None, {'platform_id': 'FY4A', 'instrument': 'AGRI'}, self.file_type)
+ with pytest.raises(NotImplementedError):
+ fy4.calibrate(None, {'calibration': 'radiance'}, None, None)
+
+ def test_badplatform(self):
+ """Test case where we pass a bad calibration type, radiance is not supported."""
+ with pytest.raises(KeyError):
+ FY4Base(None, {'platform_id': 'FY3D', 'instrument': 'AGRI'}, self.file_type)
diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py
index 347ea763ce..ecab67b08c 100644
--- a/satpy/tests/reader_tests/test_generic_image.py
+++ b/satpy/tests/reader_tests/test_generic_image.py
@@ -19,9 +19,10 @@
import os
import unittest
-import xarray as xr
import dask.array as da
import numpy as np
+import xarray as xr
+
from satpy.tests.utils import make_dataid
@@ -34,13 +35,14 @@ def setUp(self):
from datetime import datetime
from pyresample.geometry import AreaDefinition
+
from satpy.scene import Scene
self.date = datetime(2018, 1, 1)
# Create area definition
pcs_id = 'ETRS89 / LAEA Europe'
- proj4_dict = {'init': 'epsg:3035'}
+ proj4_dict = "EPSG:3035"
self.x_size = 100
self.y_size = 100
area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222)
@@ -132,9 +134,9 @@ def test_png_scene(self):
scn = Scene(reader='generic_image', filenames=[fname])
scn.load(['image'])
self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
- self.assertEqual(scn.attrs['sensor'], set(['images']))
- self.assertEqual(scn.attrs['start_time'], None)
- self.assertEqual(scn.attrs['end_time'], None)
+ self.assertEqual(scn.sensor_names, {'images'})
+ self.assertEqual(scn.start_time, None)
+ self.assertEqual(scn.end_time, None)
self.assertNotIn('area', scn['image'].attrs)
fname = os.path.join(self.base_dir, '20180101_0000_test_la.png')
@@ -142,9 +144,9 @@ def test_png_scene(self):
scn.load(['image'])
data = da.compute(scn['image'].data)
self.assertEqual(scn['image'].shape, (1, self.y_size, self.x_size))
- self.assertEqual(scn.attrs['sensor'], set(['images']))
- self.assertEqual(scn.attrs['start_time'], self.date)
- self.assertEqual(scn.attrs['end_time'], self.date)
+ self.assertEqual(scn.sensor_names, {'images'})
+ self.assertEqual(scn.start_time, self.date)
+ self.assertEqual(scn.end_time, self.date)
self.assertNotIn('area', scn['image'].attrs)
self.assertEqual(np.sum(np.isnan(data)), 100)
@@ -156,18 +158,18 @@ def test_geotiff_scene(self):
scn = Scene(reader='generic_image', filenames=[fname])
scn.load(['image'])
self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
- self.assertEqual(scn.attrs['sensor'], set(['images']))
- self.assertEqual(scn.attrs['start_time'], self.date)
- self.assertEqual(scn.attrs['end_time'], self.date)
+ self.assertEqual(scn.sensor_names, {'images'})
+ self.assertEqual(scn.start_time, self.date)
+ self.assertEqual(scn.end_time, self.date)
self.assertEqual(scn['image'].area, self.area_def)
fname = os.path.join(self.base_dir, 'test_rgba.tif')
scn = Scene(reader='generic_image', filenames=[fname])
scn.load(['image'])
self.assertEqual(scn['image'].shape, (3, self.y_size, self.x_size))
- self.assertEqual(scn.attrs['sensor'], set(['images']))
- self.assertEqual(scn.attrs['start_time'], None)
- self.assertEqual(scn.attrs['end_time'], None)
+ self.assertEqual(scn.sensor_names, {'images'})
+ self.assertEqual(scn.start_time, None)
+ self.assertEqual(scn.end_time, None)
self.assertEqual(scn['image'].area, self.area_def)
def test_geotiff_scene_nan(self):
diff --git a/satpy/tests/reader_tests/test_geocat.py b/satpy/tests/reader_tests/test_geocat.py
index 1aee501b18..b9323a39e2 100644
--- a/satpy/tests/reader_tests/test_geocat.py
+++ b/satpy/tests/reader_tests/test_geocat.py
@@ -18,13 +18,14 @@
"""Module for testing the satpy.readers.geocat module."""
import os
+import unittest
+from unittest import mock
+
import numpy as np
+
from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
-import unittest
-from unittest import mock
-
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
@@ -136,8 +137,9 @@ def test_init(self):
def test_load_all_old_goes(self):
"""Test loading all test datasets from old GOES files."""
- from satpy.readers import load_reader
import xarray as xr
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray):
loadables = r.select_files_from_pathnames([
@@ -155,9 +157,10 @@ def test_load_all_old_goes(self):
def test_load_all_himawari8(self):
"""Test loading all test datasets from H8 NetCDF file."""
- from satpy.readers import load_reader
- from pyresample.geometry import AreaDefinition
import xarray as xr
+ from pyresample.geometry import AreaDefinition
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray):
loadables = r.select_files_from_pathnames([
@@ -176,9 +179,10 @@ def test_load_all_himawari8(self):
def test_load_all_goes17_hdf4(self):
"""Test loading all test datasets from GOES-17 HDF4 file."""
- from satpy.readers import load_reader
- from pyresample.geometry import AreaDefinition
import xarray as xr
+ from pyresample.geometry import AreaDefinition
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs)
with mock.patch('satpy.readers.geocat.netCDF4.Variable', xr.DataArray):
loadables = r.select_files_from_pathnames([
diff --git a/satpy/tests/reader_tests/test_geos_area.py b/satpy/tests/reader_tests/test_geos_area.py
index 0bf22c78ba..d7d4c2a510 100644
--- a/satpy/tests/reader_tests/test_geos_area.py
+++ b/satpy/tests/reader_tests/test_geos_area.py
@@ -18,15 +18,18 @@
"""Geostationary project utility module tests package."""
import unittest
-from satpy.readers._geos_area import (get_xy_from_linecol,
- get_area_extent,
- get_area_definition,
- sampling_to_lfac_cfac,
- get_geos_area_naming,
- get_resolution_and_unit_strings)
import numpy as np
+from satpy.readers._geos_area import (
+ get_area_definition,
+ get_area_extent,
+ get_geos_area_naming,
+ get_resolution_and_unit_strings,
+ get_xy_from_linecol,
+ sampling_to_lfac_cfac,
+)
+
class TestGEOSProjectionUtil(unittest.TestCase):
"""Tests for the area utilities."""
diff --git a/satpy/tests/reader_tests/test_ghi_l1.py b/satpy/tests/reader_tests/test_ghi_l1.py
new file mode 100644
index 0000000000..47e873f0d9
--- /dev/null
+++ b/satpy/tests/reader_tests/test_ghi_l1.py
@@ -0,0 +1,382 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The agri_l1 reader tests package."""
+
+import os
+from unittest import mock
+
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+
+from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
+
+ALL_BAND_NAMES = ["C01", "C02", "C03", "C04", "C05", "C06", "C07"]
+RESOLUTION_LIST = [250, 500, 2000]
+
+CHANNELS_BY_RESOLUTION = {250: ["C01"],
+ 500: ["C01", "C02", "C03", "C04", "C05", "C06"],
+ 2000: ALL_BAND_NAMES,
+ 'GEO': 'solar_azimuth_angle'
+ }
+
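+# Reference area extents per resolution, compared against the AreaDefinition
+# the reader computes from the fake file content.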
+AREA_EXTENTS_BY_RESOLUTION = {
+ 250: (896278.676104, 562456.016066, 895155.242397, 452480.774883),
+ 500: (896153.676104, 562331.016066, 895155.242397, 452480.774883),
+ 2000: (895403.676104, 561581.016066, 895155.242397, 452480.774883)
+}
+
+
+class FakeHDF5FileHandler2(FakeHDF5FileHandler):
+ """Swap-in HDF5 File Handler."""
+
+ def make_test_data(self, cwl, ch, prefix, dims, file_type):
+ """Make test data."""
+ if prefix == 'CAL':
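+            # Fake calibration LUT: ten increasing values in [0.1, 1.0], standing
+            # in for the count-to-physical-value table found in real L1 files.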
+ data = xr.DataArray(
+ da.from_array((np.arange(10.) + 1.) / 10., [dims[0] * dims[1]]),
+ attrs={
+ 'Slope': np.array(1.), 'Intercept': np.array(0.),
+ 'FillValue': np.array(-65535.0),
+ 'units': 'NUL',
+ 'center_wavelength': '{}um'.format(cwl).encode('utf-8'),
+ 'band_names': 'band{}(band number is range from 1 to 14)'
+ .format(ch).encode('utf-8'),
+ 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'),
+ 'valid_range': np.array([0, 1.5]),
+ },
+ dims='_const')
+
+ elif prefix == 'NOM':
+ data = xr.DataArray(
+ da.from_array(np.arange(10, dtype=np.uint16).reshape((2, 5)) + 1,
+ [dim for dim in dims]),
+ attrs={
+ 'Slope': np.array(1.), 'Intercept': np.array(0.),
+ 'FillValue': np.array(65535),
+ 'units': 'DN',
+ 'center_wavelength': '{}um'.format(cwl).encode('utf-8'),
+ 'band_names': 'band{}(band number is range from 1 to 7)'
+ .format(ch).encode('utf-8'),
+ 'long_name': 'Calibration table of {}um Channel'.format(cwl).encode('utf-8'),
+ 'valid_range': np.array([0, 4095]),
+ },
+ dims=('_RegLength', '_RegWidth'))
+
+ elif prefix == 'GEO':
+ data = xr.DataArray(
+ da.from_array(np.arange(10, dtype=np.float32).reshape((2, 5)) + 1,
+ [dim for dim in dims]),
+ attrs={
+ 'Slope': np.array(1.), 'Intercept': np.array(0.),
+ 'FillValue': np.array(65535.),
+ 'units': 'NUL',
+ 'band_names': 'NUL',
+ 'valid_range': np.array([0., 360.]),
+ },
+ dims=('_RegLength', '_RegWidth'))
+
+ elif prefix == 'COEF':
+ if file_type == '250':
+ data = self._create_coeff_array(1)
+
+ elif file_type == '500':
+ data = self._create_coeff_array(6)
+
+ elif file_type == '2000':
+ data = self._create_coeff_array(7)
+
+ return data
+
+ def _create_coeff_array(self, nb_channels):
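+        # One (SCALE, OFFSET) pair per channel: column 0 is scaled by 1e-4,
+        # column 1 by 1e-2, matching the 'CALIBRATION_COEF(SCALE+OFFSET)' layout.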
+ data = xr.DataArray(
+ da.from_array((np.arange(nb_channels * 2).reshape((nb_channels, 2)) + 1.) /
+ np.array([1E4, 1E2]), [nb_channels, 2]),
+ attrs={
+ 'Slope': 1., 'Intercept': 0.,
+ 'FillValue': 0,
+ 'units': 'NUL',
+ 'band_names': 'NUL',
+ 'long_name': b'Calibration coefficient (SCALE and OFFSET)',
+ 'valid_range': [-500, 500],
+ },
+ dims=('_num_channel', '_coefs'))
+ return data
+
+ def _create_channel_data(self, chs, cwls, file_type):
+ dim_0 = 2
+ dim_1 = 5
+ data = {}
+ for index, _cwl in enumerate(cwls):
+ data['Calibration/CALChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'CAL',
+ [dim_0, dim_1], file_type)
+ data['Data/NOMChannel' + '%02d' % chs[index]] = self.make_test_data(cwls[index], chs[index], 'NOM',
+ [dim_0, dim_1], file_type)
+ data['Calibration/CALIBRATION_COEF(SCALE+OFFSET)'] = self.make_test_data(cwls[index], chs[index], 'COEF',
+ [dim_0, dim_1], file_type)
+ return data
+
+ def _get_250m_data(self, file_type):
+ chs = [1]
+ cwls = [0.675]
+ data = self._create_channel_data(chs, cwls, file_type)
+
+ return data
+
+ def _get_500m_data(self, file_type):
+ chs = [1, 2, 3, 4, 5, 6]
+ cwls = [0.675, 0.47, 0.545, 0.645, 1.378, 1.61]
+ data = self._create_channel_data(chs, cwls, file_type)
+
+ return data
+
+ def _get_2km_data(self, file_type):
+ chs = [1, 2, 3, 4, 5, 6, 7]
+ cwls = [0.675, 0.47, 0.545, 0.645, 1.378, 1.61, 11.4]
+ data = self._create_channel_data(chs, cwls, file_type)
+
+ return data
+
+ def _get_geo_data(self, file_type):
+ dim_0 = 2
+ dim_1 = 5
+ data = {'Navigation/NOMSunAzimuth': self.make_test_data('NUL', 'NUL', 'GEO',
+ [dim_0, dim_1], file_type)}
+ return data
+
+ def get_test_content(self, filename, filename_info, filetype_info):
+ """Mimic reader input file content."""
+ global_attrs = {
+ '/attr/NOMSubSatLat': np.array(0.0),
+ '/attr/NOMSubSatLon': np.array(133.0),
+ '/attr/NOMSatHeight': np.array(3.5786E7),
+ '/attr/Semi_major_axis': np.array(6378.14),
+ '/attr/Semi_minor_axis': np.array(6353.28),
+ '/attr/OBIType': 'REGX',
+ '/attr/RegLength': np.array(2.0),
+ '/attr/RegWidth': np.array(5.0),
+ '/attr/Corner-Point Latitudes': np.array((4.1, 5.1, 4.1, 5.1)),
+ '/attr/Corner-Point Longitudes': np.array((141.1, 141.1, 141.1, 151.1)),
+ '/attr/Begin Line Number': np.array(0),
+ '/attr/End Line Number': np.array(1),
+ '/attr/Observing Beginning Date': '2019-06-03', '/attr/Observing Beginning Time': '00:30:01.807',
+ '/attr/Observing Ending Date': '2019-06-03', '/attr/Observing Ending Time': '00:34:07.572',
+ '/attr/Satellite Name': 'FY4B', '/attr/Sensor Identification Code': 'GHI', '/attr/Sensor Name': 'GHI',
+ }
+
+ data = {}
+ if self.filetype_info['file_type'] == 'ghi_l1_0250m':
+ data = self._get_250m_data('250')
+ elif self.filetype_info['file_type'] == 'ghi_l1_0500m':
+ data = self._get_500m_data('500')
+ elif self.filetype_info['file_type'] == 'ghi_l1_2000m':
+ data = self._get_2km_data('2000')
+ elif self.filetype_info['file_type'] == 'ghi_l1_2000m_geo':
+ data = self._get_geo_data('2000')
+
+ test_content = {}
+ test_content.update(global_attrs)
+ test_content.update(data)
+
+ return test_content
+
+
+def _create_filenames_from_resolutions(*resolutions):
+ """Create filenames from the given resolutions."""
+ if 'GEO' in resolutions:
+ return ["FY4B-_GHI---_N_REGX_1330E_L1-_GEO-_MULT_NOM_20220613145300_20220613145359_2000M_V0001.HDF"]
+ pattern = ("FY4B-_GHI---_N_REGX_1330E_L1-_FDI-_MULT_NOM_20220613145300_20220613145359_"
+ "{resolution:04d}M_V0001.HDF")
+ return [pattern.format(resolution=resolution) for resolution in resolutions]
+
+
+class Test_HDF_GHI_L1_cal:
+ """Test VIRR L1B Reader."""
+
+ yaml_file = "ghi_l1.yaml"
+
+ def setup(self):
+ """Wrap HDF5 file handler with our own fake handler."""
+ from satpy._config import config_search_paths
+ from satpy.readers.fy4_base import FY4Base
+ from satpy.readers.ghi_l1 import HDF_GHI_L1
+ self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+ # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
+ self.fy4 = mock.patch.object(FY4Base, '__bases__', (FakeHDF5FileHandler2,))
+ self.p = mock.patch.object(HDF_GHI_L1.__class__, (self.fy4,))
+ self.fake_handler = self.fy4.start()
+ self.p.is_local = True
+
+ self.expected = {
+ 'C01': np.array([[2.01, 2.02, 2.03, 2.04, 2.05], [2.06, 2.07, 2.08, 2.09, 2.1]]),
+ 'C02': np.array([[4.03, 4.06, 4.09, 4.12, 4.15], [4.18, 4.21, 4.24, 4.27, 4.3]]),
+ 'C03': np.array([[6.05, 6.1, 6.15, 6.2, 6.25], [6.3, 6.35, 6.4, 6.45, 6.5]]),
+ 'C04': np.array([[8.07, 8.14, 8.21, 8.28, 8.35], [8.42, 8.49, 8.56, 8.63, 8.7]]),
+ 'C05': np.array([[10.09, 10.18, 10.27, 10.36, 10.45], [10.54, 10.63, 10.72, 10.81, 10.9]]),
+ 'C06': np.array([[12.11, 12.22, 12.33, 12.44, 12.55], [12.66, 12.77, 12.88, 12.99, 13.1]]),
+ 'C07': np.array([[0.2, 0.3, 0.4, 0.5, 0.6], [0.7, 0.8, 0.9, 1., np.nan]]),
+ }
+
+ def teardown(self):
+ """Stop wrapping the HDF5 file handler."""
+ self.p.stop()
+
+ def test_ghi_channels_are_loaded_with_right_resolution(self):
+ """Test all channels are loaded with the right resolution."""
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
+
+ available_datasets = reader.available_dataset_ids
+
+ for resolution_to_test in RESOLUTION_LIST:
+ self._check_keys_for_dsq(available_datasets, resolution_to_test)
+
+ def test_ghi_all_bands_have_right_units(self):
+ """Test all bands have the right units."""
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
+
+ band_names = ALL_BAND_NAMES
+ res = reader.load(band_names)
+ assert len(res) == 7
+
+ for band_name in band_names:
+ assert res[band_name].shape == (2, 5)
+ self._check_units(band_name, res)
+
+ def test_ghi_orbital_parameters_are_correct(self):
+ """Test orbital parameters are set correctly."""
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
+
+ band_names = ALL_BAND_NAMES
+ res = reader.load(band_names)
+
+        # check that every orbital parameter value is a float
+ orbital_parameters = res[band_names[0]].attrs['orbital_parameters']
+ for attr in orbital_parameters:
+ assert isinstance(orbital_parameters[attr], float)
+ assert orbital_parameters['satellite_nominal_latitude'] == 0.
+ assert orbital_parameters['satellite_nominal_longitude'] == 133.0
+ assert orbital_parameters['satellite_nominal_altitude'] == 3.5786E7
+
+ @staticmethod
+ def _check_keys_for_dsq(available_datasets, resolution_to_test):
+ from satpy.dataset.data_dict import get_key
+ from satpy.tests.utils import make_dsq
+
+ band_names = CHANNELS_BY_RESOLUTION[resolution_to_test]
+ for band_name in band_names:
+ ds_q = make_dsq(name=band_name, resolution=resolution_to_test)
+ res = get_key(ds_q, available_datasets, num_results=0, best=False)
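+            # Band names are zero-padded, so lexicographic comparison splits the
+            # visible bands (C01-C06, two dataset entries each) from C07 (three).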
+ if band_name < 'C07':
+ assert len(res) == 2
+ else:
+ assert len(res) == 3
+
+ def test_ghi_counts_calibration(self):
+ """Test loading data at counts calibration."""
+ from satpy.tests.utils import make_dsq
+ reader = self._create_reader_for_resolutions(*RESOLUTION_LIST)
+
+ ds_ids = []
+ band_names = CHANNELS_BY_RESOLUTION[2000]
+ for band_name in band_names:
+ ds_ids.append(make_dsq(name=band_name, calibration='counts'))
+ res = reader.load(ds_ids)
+ assert len(res) == 7
+
+ for band_name in band_names:
+ assert res[band_name].shape == (2, 5)
+ assert res[band_name].attrs['calibration'] == "counts"
+ assert res[band_name].dtype == np.uint16
+ assert res[band_name].attrs['units'] == "1"
+
+ def test_ghi_geo(self):
+ """Test loading data for angles."""
+ from satpy.tests.utils import make_dsq
+ reader = self._create_reader_for_resolutions('GEO')
+ band_name = 'solar_azimuth_angle'
+ ds_ids = [make_dsq(name=band_name)]
+ res = reader.load(ds_ids)
+ assert len(res) == 1
+
+ assert res[band_name].shape == (2, 5)
+ assert res[band_name].dtype == np.float32
+
+ def _create_reader_for_resolutions(self, *resolutions):
+ from satpy.readers import load_reader
+ filenames = _create_filenames_from_resolutions(*resolutions)
+ reader = load_reader(self.reader_configs)
+ files = reader.select_files_from_pathnames(filenames)
+ assert len(filenames) == len(files)
+ reader.create_filehandlers(files)
+ # Make sure we have some files
+ assert reader.file_handlers
+ return reader
+
+ @pytest.mark.parametrize("resolution_to_test", RESOLUTION_LIST)
+ def test_ghi_for_one_resolution(self, resolution_to_test):
+ """Test loading data when only one resolution is available."""
+ reader = self._create_reader_for_resolutions(resolution_to_test)
+ available_datasets = reader.available_dataset_ids
+ band_names = CHANNELS_BY_RESOLUTION[resolution_to_test]
+ self._assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test)
+ res = reader.load(band_names)
+ assert len(res) == len(band_names)
+ self._check_calibration_and_units(band_names, res)
+ for band_name in band_names:
+ np.testing.assert_allclose(np.array(res[band_name].attrs['area'].area_extent),
+ np.array(AREA_EXTENTS_BY_RESOLUTION[resolution_to_test]))
+
+ def _check_calibration_and_units(self, band_names, result):
+ for band_name in band_names:
+ assert result[band_name].attrs['sensor'].islower()
+ assert result[band_name].shape == (2, 5)
+ np.testing.assert_allclose(result[band_name].values, self.expected[band_name], equal_nan=True)
+ self._check_units(band_name, result)
+
+ @staticmethod
+ def _check_units(band_name, result):
+        if band_name <= 'C06':
+            assert result[band_name].attrs['calibration'] == "reflectance"
+            assert result[band_name].attrs['units'] == "%"
+        else:
+            assert result[band_name].attrs['calibration'] == "brightness_temperature"
+            assert result[band_name].attrs['units'] == "K"
+
+ @staticmethod
+ def _assert_which_channels_are_loaded(available_datasets, band_names, resolution_to_test):
+ from satpy.dataset.data_dict import get_key
+ from satpy.tests.utils import make_dsq
+
+ other_resolutions = RESOLUTION_LIST.copy()
+ other_resolutions.remove(resolution_to_test)
+ for band_name in band_names:
+ for resolution in other_resolutions:
+ ds_q = make_dsq(name=band_name, resolution=resolution)
+ with pytest.raises(KeyError):
+ _ = get_key(ds_q, available_datasets, num_results=0, best=False)
+
+ ds_q = make_dsq(name=band_name, resolution=resolution_to_test)
+ res = get_key(ds_q, available_datasets, num_results=0, best=False)
+ if band_name < 'C07':
+ assert len(res) == 2
+ else:
+ assert len(res) == 3
diff --git a/satpy/tests/reader_tests/test_ghrsst_l2.py b/satpy/tests/reader_tests/test_ghrsst_l2.py
new file mode 100644
index 0000000000..e33cec467a
--- /dev/null
+++ b/satpy/tests/reader_tests/test_ghrsst_l2.py
@@ -0,0 +1,152 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2018, 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.ghrsst_l2 module."""
+
+import os
+import tarfile
+from datetime import datetime
+from pathlib import Path
+
+import numpy as np
+import pytest
+import xarray as xr
+
+from satpy.readers.ghrsst_l2 import GHRSSTL2FileHandler
+
+
+class TestGHRSSTL2Reader:
+ """Test Sentinel-3 SST L2 reader."""
+
+    def setup_method(self, method):
+        """Create a fake OSI SAF GHRSST dataset."""
+ self.base_data = np.array(([-32768, 1135, 1125], [1138, 1128, 1080]))
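+        # -32768 is the fill value; the remaining counts decode to roughly 284 K
+        # via the scale_factor/add_offset set on the DataArray below.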
+ self.lon_data = np.array(([-13.43, 1.56, 11.25], [-11.38, 1.28, 10.80]))
+ self.lat_data = np.array(([43.43, 55.56, 61.25], [41.38, 50.28, 60.80]))
+ self.lon = xr.DataArray(
+ self.lon_data,
+ dims=('nj', 'ni'),
+ attrs={'standard_name': 'longitude',
+ 'units': 'degrees_east',
+ }
+ )
+ self.lat = xr.DataArray(
+ self.lat_data,
+ dims=('nj', 'ni'),
+ attrs={'standard_name': 'latitude',
+ 'units': 'degrees_north',
+ }
+ )
+ self.sst = xr.DataArray(
+ self.base_data,
+ dims=('nj', 'ni'),
+ attrs={'scale_factor': 0.01, 'add_offset': 273.15,
+ '_FillValue': -32768, 'units': 'kelvin',
+ }
+ )
+ self.fake_dataset = xr.Dataset(
+ data_vars={
+ 'sea_surface_temperature': self.sst,
+ 'longitude': self.lon,
+ 'latitude': self.lat,
+ },
+ attrs={
+ "start_time": "20220321T112640Z",
+ "stop_time": "20220321T145711Z",
+ "platform": 'NOAA20',
+ "sensor": "VIIRS",
+ },
+ )
+
+ def _create_tarfile_with_testdata(self, mypath):
+ """Create a 'fake' testdata set in a tar file."""
+ slstr_fakename = "S3A_SL_2_WST_MAR_O_NR_003.SEN3"
+ tarfile_fakename = "S3A_SL_2_WST_MAR_O_NR_003.SEN3.tar"
+
+ slstrdir = mypath / slstr_fakename
+ slstrdir.mkdir(parents=True, exist_ok=True)
+ tarfile_path = mypath / tarfile_fakename
+
+ ncfilename = slstrdir / 'L2P_GHRSST-SSTskin-202204131200.nc'
+ self.fake_dataset.to_netcdf(os.fspath(ncfilename))
+ xmlfile_path = slstrdir / 'xfdumanifest.xml'
+ xmlfile_path.touch()
+
+ with tarfile.open(name=tarfile_path, mode='w') as tar:
+ tar.add(os.fspath(ncfilename), arcname=Path(slstr_fakename) / ncfilename.name)
+ tar.add(os.fspath(xmlfile_path), arcname=Path(slstr_fakename) / xmlfile_path.name)
+
+ return tarfile_path
+
+ def test_instantiate_single_netcdf_file(self, tmp_path):
+ """Test initialization of file handlers - given a single netCDF file."""
+ filename_info = {}
+ tmp_filepath = tmp_path / 'fake_dataset.nc'
+ self.fake_dataset.to_netcdf(os.fspath(tmp_filepath))
+
+ GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None)
+
+ def test_instantiate_tarfile(self, tmp_path):
+ """Test initialization of file handlers - given a tar file as in the case of the SAFE format."""
+ filename_info = {}
+ tarfile_path = self._create_tarfile_with_testdata(tmp_path)
+
+ GHRSSTL2FileHandler(os.fspath(tarfile_path), filename_info, None)
+
+ def test_get_dataset(self, tmp_path):
+ """Test retrieval of datasets."""
+ filename_info = {}
+ tmp_filepath = tmp_path / 'fake_dataset.nc'
+ self.fake_dataset.to_netcdf(os.fspath(tmp_filepath))
+
+ test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None)
+
+ test.get_dataset('longitude', {'standard_name': 'longitude'})
+ test.get_dataset('latitude', {'standard_name': 'latitude'})
+ test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'})
+
+ with pytest.raises(KeyError):
+ test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'})
+
+ def test_get_sensor(self, tmp_path):
+ """Test retrieval of the sensor name from the netCDF file."""
+        dt_valid = datetime(2022, 3, 21, 11, 26, 40)  # 20220321T112640Z
+ filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_',
+ 'satid': 'NOAA20_', 'valid_time': dt_valid}
+
+ tmp_filepath = tmp_path / 'fake_dataset.nc'
+ self.fake_dataset.to_netcdf(os.fspath(tmp_filepath))
+
+ test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None)
+ assert test.sensor == 'viirs'
+
+ def test_get_start_and_end_times(self, tmp_path):
+ """Test retrieval of the sensor name from the netCDF file."""
+ dt_valid = datetime(2022, 3, 21, 11, 26, 40) # 202203211200Z
+ good_start_time = datetime(2022, 3, 21, 11, 26, 40) # 20220321T112640Z
+ good_stop_time = datetime(2022, 3, 21, 14, 57, 11) # 20220321T145711Z
+
+ filename_info = {'field_type': 'NARSST', 'generating_centre': 'FRA_',
+ 'satid': 'NOAA20_', 'valid_time': dt_valid}
+
+ tmp_filepath = tmp_path / 'fake_dataset.nc'
+ self.fake_dataset.to_netcdf(os.fspath(tmp_filepath))
+
+ test = GHRSSTL2FileHandler(os.fspath(tmp_filepath), filename_info, None)
+
+ assert test.start_time == good_start_time
+ assert test.end_time == good_stop_time
diff --git a/satpy/tests/reader_tests/test_glm_l2.py b/satpy/tests/reader_tests/test_glm_l2.py
index 4ef5990baf..57d324f0b1 100644
--- a/satpy/tests/reader_tests/test_glm_l2.py
+++ b/satpy/tests/reader_tests/test_glm_l2.py
@@ -208,8 +208,8 @@ class TestGLML2Reader(unittest.TestCase):
@mock.patch('satpy.readers.abi_base.xr')
def setUp(self, xr_):
"""Create a fake reader to test."""
- from satpy.readers import load_reader
from satpy._config import config_search_paths
+ from satpy.readers import load_reader
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
fake_dataset = setup_fake_dataset()
xr_.open_dataset.return_value = fake_dataset
diff --git a/satpy/tests/reader_tests/test_goes_imager_hrit.py b/satpy/tests/reader_tests/test_goes_imager_hrit.py
index 934759b824..02b9632335 100644
--- a/satpy/tests/reader_tests/test_goes_imager_hrit.py
+++ b/satpy/tests/reader_tests/test_goes_imager_hrit.py
@@ -22,12 +22,19 @@
from unittest import mock
import numpy as np
+from pyresample.utils import proj4_radius_parameters
from xarray import DataArray
-from satpy.readers.goes_imager_hrit import (ALTITUDE, HRITGOESFileHandler,
- HRITGOESPrologueFileHandler,
- make_gvar_float, make_sgs_time,
- sgs_time)
+from satpy.readers.goes_imager_hrit import (
+ ALTITUDE,
+ EQUATOR_RADIUS,
+ POLE_RADIUS,
+ HRITGOESFileHandler,
+ HRITGOESPrologueFileHandler,
+ make_gvar_float,
+ make_sgs_time,
+ sgs_time,
+)
from satpy.tests.utils import make_dataid
@@ -162,3 +169,31 @@ def test_get_dataset(self, base_get_dataset):
{'projection_longitude': self.reader.mda['projection_parameters']['SSP_longitude'],
'projection_latitude': 0.0,
'projection_altitude': ALTITUDE})
+
+ def test_get_area_def(self):
+ """Test getting the area definition."""
+ self.reader.mda.update({
+ 'cfac': 10216334,
+ 'lfac': 10216334,
+ 'coff': 1408.0,
+ 'loff': 944.0,
+ 'number_of_lines': 464,
+ 'number_of_columns': 2816
+ })
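+        # The scaling factors, offsets and shape above describe a 3 km full-disc
+        # grid, consistent with the area_id asserted at the end of this test.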
+ dsid = make_dataid(name="CH1", calibration='reflectance',
+ resolution=3000)
+ area = self.reader.get_area_def(dsid)
+
+ a, b = proj4_radius_parameters(area.proj_dict)
+ assert a == EQUATOR_RADIUS
+ assert b == POLE_RADIUS
+ assert area.proj_dict['h'] == ALTITUDE
+ assert area.proj_dict['lon_0'] == 100.1640625
+ assert area.proj_dict['proj'] == 'geos'
+ assert area.proj_dict['units'] == 'm'
+ assert area.width == 2816
+ assert area.height == 464
+ assert area.area_id == 'goes-15_goes_imager_fd_3km'
+ area_extent_exp = (-5639254.900260435, 1925159.4881528523,
+ 5643261.475678028, 3784210.48191544)
+ np.testing.assert_allclose(area.area_extent, area_extent_exp)
diff --git a/satpy/tests/reader_tests/test_goes_imager_nc_eum.py b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py
new file mode 100644
index 0000000000..d72271f623
--- /dev/null
+++ b/satpy/tests/reader_tests/test_goes_imager_nc_eum.py
@@ -0,0 +1,167 @@
+# Copyright (c) 2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for the goes imager nc reader (EUMETSAT variant)."""
+
+import unittest
+from unittest import mock
+
+import numpy as np
+import xarray as xr
+
+from satpy.readers.goes_imager_nc import is_vis_channel
+from satpy.tests.utils import make_dataid
+
+
+class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase):
+ """Tests for the radiances."""
+
+ longMessage = True
+
+ @mock.patch('satpy.readers.goes_imager_nc.xr')
+ def setUp(self, xr_):
+ """Set up the tests."""
+ from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler
+
+ self.coefs = CALIB_COEFS['GOES-15']
+ self.all_coefs = CALIB_COEFS
+ self.channels = sorted(self.coefs.keys())
+ self.ir_channels = sorted([ch for ch in self.channels
+ if not is_vis_channel(ch)])
+ self.vis_channels = sorted([ch for ch in self.channels
+ if is_vis_channel(ch)])
+
+ # Mock file access to return a fake dataset.
+ nrows = ncols = 300
+ self.radiance = np.ones((1, nrows, ncols)) # IR channels
+ self.lon = np.zeros((nrows, ncols)) # Dummy
+ self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape(
+ nrows, ncols) # Includes invalid values to be masked
+
+ xr_.open_dataset.return_value = xr.Dataset(
+ {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')),
+ 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'),
+ dims=('time',)),
+ 'bands': xr.DataArray(data=np.array([1]))},
+ attrs={'Satellite Sensor': 'G-15'})
+
+ geo_data = xr.Dataset(
+ {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')),
+ 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))},
+ attrs={'Satellite Sensor': 'G-15'})
+
+ # Instantiate reader using the mocked open_dataset() method
+ self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={},
+ filetype_info={}, geo_data=geo_data)
+
+ def test_get_dataset_radiance(self):
+ """Test getting the radiances."""
+ for ch in self.channels:
+ if not is_vis_channel(ch):
+ radiance = self.reader.get_dataset(
+ key=make_dataid(name=ch, calibration='radiance'), info={})
+ # ... this only compares the valid (unmasked) elements
+ self.assertTrue(np.all(self.radiance == radiance.to_masked_array()),
+ msg='get_dataset() returns invalid radiance for '
+ 'channel {}'.format(ch))
+
+ def test_calibrate(self):
+ """Test whether the correct calibration methods are called."""
+ for ch in self.channels:
+ if not is_vis_channel(ch):
+ calibs = {'brightness_temperature': '_calibrate_ir'}
+ for calib, method in calibs.items():
+ with mock.patch.object(self.reader, method) as target_func:
+ self.reader.calibrate(data=self.reader.nc['data'],
+ calibration=calib, channel=ch)
+ target_func.assert_called()
+
+ def test_get_sector(self):
+ """Test sector identification."""
+ from satpy.readers.goes_imager_nc import (
+ FULL_DISC,
+ NORTH_HEMIS_EAST,
+ NORTH_HEMIS_WEST,
+ SOUTH_HEMIS_EAST,
+ SOUTH_HEMIS_WEST,
+ UNKNOWN_SECTOR,
+ )
+ shapes = {
+ (2700, 5200): FULL_DISC,
+ (1850, 3450): NORTH_HEMIS_EAST,
+ (600, 3500): SOUTH_HEMIS_EAST,
+ (1310, 3300): NORTH_HEMIS_WEST,
+ (1099, 2800): SOUTH_HEMIS_WEST,
+ (123, 456): UNKNOWN_SECTOR
+ }
+ for (nlines, ncols), sector_ref in shapes.items():
+ for channel in ('00_7', '10_7'):
+ sector = self.reader._get_sector(channel=channel, nlines=nlines,
+ ncols=ncols)
+ self.assertEqual(sector, sector_ref,
+ msg='Incorrect sector identification')
+
+
+class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase):
+ """Testing the reflectances."""
+
+ longMessage = True
+
+ @mock.patch('satpy.readers.goes_imager_nc.xr')
+ def setUp(self, xr_):
+ """Set up the tests."""
+ from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESEUMNCFileHandler
+
+ self.coefs = CALIB_COEFS['GOES-15']
+ self.all_coefs = CALIB_COEFS
+ self.channels = sorted(self.coefs.keys())
+ self.ir_channels = sorted([ch for ch in self.channels
+ if not is_vis_channel(ch)])
+ self.vis_channels = sorted([ch for ch in self.channels
+ if is_vis_channel(ch)])
+
+ # Mock file access to return a fake dataset.
+ nrows = ncols = 300
+ self.reflectance = 50 * np.ones((1, nrows, ncols)) # Vis channel
+ self.lon = np.zeros((nrows, ncols)) # Dummy
+ self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape(
+ nrows, ncols) # Includes invalid values to be masked
+
+ xr_.open_dataset.return_value = xr.Dataset(
+ {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')),
+ 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'),
+ dims=('time',)),
+ 'bands': xr.DataArray(data=np.array([1]))},
+ attrs={'Satellite Sensor': 'G-15'})
+
+ geo_data = xr.Dataset(
+ {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')),
+ 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))},
+ attrs={'Satellite Sensor': 'G-15'})
+
+ # Instantiate reader using the mocked open_dataset() method
+ self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={},
+ filetype_info={}, geo_data=geo_data)
+
+ def test_get_dataset_reflectance(self):
+ """Test getting the reflectance."""
+ for ch in self.channels:
+ if is_vis_channel(ch):
+ refl = self.reader.get_dataset(
+ key=make_dataid(name=ch, calibration='reflectance'), info={})
+ # ... this only compares the valid (unmasked) elements
+ self.assertTrue(np.all(self.reflectance == refl.to_masked_array()),
+ msg='get_dataset() returns invalid reflectance for '
+ 'channel {}'.format(ch))
diff --git a/satpy/tests/reader_tests/test_goes_imager_nc.py b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py
similarity index 65%
rename from satpy/tests/reader_tests/test_goes_imager_nc.py
rename to satpy/tests/reader_tests/test_goes_imager_nc_noaa.py
index 17ae48d85a..a42ebc1f33 100644
--- a/satpy/tests/reader_tests/test_goes_imager_nc.py
+++ b/satpy/tests/reader_tests/test_goes_imager_nc_noaa.py
@@ -15,7 +15,8 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Tests for the goes imager nc reader."""
+"""Tests for the goes imager nc reader (NOAA CLASS variant)."""
+
import datetime
import unittest
from unittest import mock
@@ -23,7 +24,9 @@
import numpy as np
import pytest
import xarray as xr
+from pyresample.geometry import AreaDefinition
+from satpy.readers.goes_imager_nc import is_vis_channel
from satpy.tests.utils import make_dataid
@@ -34,7 +37,6 @@ class GOESNCBaseFileHandlerTest(unittest.TestCase):
@mock.patch('satpy.readers.goes_imager_nc.xr')
@mock.patch.multiple('satpy.readers.goes_imager_nc.GOESNCBaseFileHandler',
- __abstractmethods__=set(),
_get_sector=mock.MagicMock())
def setUp(self, xr_):
"""Set up the tests."""
@@ -49,7 +51,7 @@ def setUp(self, xr_):
self.band = 1
self.nc = xr.Dataset(
{'data': xr.DataArray(self.dummy3d, dims=('time', 'yc', 'xc')),
- 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')),
+ 'lon': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')),
'lat': xr.DataArray(data=self.dummy2d, dims=('yc', 'xc')),
'time': xr.DataArray(data=np.array([self.time],
dtype='datetime64[ms]'),
@@ -86,27 +88,6 @@ def test_get_nadir_pixel(self):
self.assertEqual((nadir_row, nadir_col), (2, 1),
msg='Incorrect nadir pixel')
- def test_get_earth_mask(self):
- """Test identification of earth/space pixels."""
- lat = xr.DataArray([-100, -90, -45, 0, 45, 90, 100])
- expected = np.array([0, 1, 1, 1, 1, 1, 0])
- mask = self.reader._get_earth_mask(lat)
- self.assertTrue(np.all(mask == expected),
- msg='Incorrect identification of earth/space pixel')
-
- def test_is_yaw_flip(self):
- """Test yaw flip identification."""
- lat_asc = xr.DataArray([[1, 1, 1],
- [2, 2, 2],
- [3, 3, 3]])
- lat_dsc = xr.DataArray([[3, 3, 3],
- [2, 2, 3],
- [1, 1, 1]])
- self.assertEqual(self.reader._is_yaw_flip(lat_asc, delta=1), True,
- msg='Yaw flip not identified')
- self.assertEqual(self.reader._is_yaw_flip(lat_dsc, delta=1), False,
- msg='Yaw flip false alarm')
-
def test_viscounts2radiance(self):
"""Test conversion from VIS counts to radiance."""
# Reference data is for detector #1
@@ -204,8 +185,7 @@ def test_start_time(self):
def test_end_time(self):
"""Test dataset end time stamp."""
- from satpy.readers.goes_imager_nc import (SCAN_DURATION, FULL_DISC,
- UNKNOWN_SECTOR)
+ from satpy.readers.goes_imager_nc import FULL_DISC, SCAN_DURATION, UNKNOWN_SECTOR
expected = {
UNKNOWN_SECTOR: self.time,
FULL_DISC: self.time + SCAN_DURATION[FULL_DISC]
@@ -215,6 +195,153 @@ def test_end_time(self):
self.assertEqual(self.reader.end_time, end_time)
+class TestMetadata:
+ """Testcase for dataset metadata."""
+
+ @pytest.fixture(params=[1, 2])
+ def channel_id(self, request):
+ """Set channel ID."""
+ return request.param
+
+ @pytest.fixture(params=[True, False])
+ def yaw_flip(self, request):
+ """Set yaw-flip flag."""
+ return request.param
+
+ def _apply_yaw_flip(self, data_array, yaw_flip):
+ if yaw_flip:
+ data_array.data = np.flipud(data_array.data)
+ return data_array
+
+ @pytest.fixture
+ def lons_lats(self, yaw_flip):
+ """Get longitudes and latitudes."""
+ lon = xr.DataArray(
+ [[-1, 0, 1, 2],
+ [-1, 0, 1, 2],
+ [-1, 0, 1, 2]],
+ dims=("yc", "xc")
+ )
+ lat = xr.DataArray(
+ [[9999, 9999, 9999, 9999],
+ [1, 1, 1, 1],
+ [-1, -1, -1, -1]],
+ dims=("yc", "xc")
+ )
+ self._apply_yaw_flip(lat, yaw_flip)
+ return lon, lat
+
+ @pytest.fixture
+ def dataset(self, lons_lats, channel_id):
+ """Create a fake dataset."""
+ lon, lat = lons_lats
+ data = xr.DataArray(
+ [[[1, 2, 3, 4],
+ [5, 6, 7, 8],
+ [9, 10, 11, 12]]],
+ dims=("time", "yc", "xc")
+ )
+ time = xr.DataArray(
+ [np.datetime64("2018-01-01 12:00:00")],
+ dims="time"
+ )
+ bands = xr.DataArray([channel_id], dims="bands")
+ return xr.Dataset(
+ {
+ 'data': data,
+ 'lon': lon,
+ 'lat': lat,
+ 'time': time,
+ 'bands': bands,
+ },
+ attrs={'Satellite Sensor': 'G-15'}
+ )
+
+ @pytest.fixture
+ def earth_mask(self, yaw_flip):
+ """Get expected earth mask."""
+ earth_mask = xr.DataArray(
+ [[False, False, False, False],
+ [True, True, True, True],
+ [True, True, True, True]],
+ dims=("yc", "xc"),
+ )
+ self._apply_yaw_flip(earth_mask, yaw_flip)
+ return earth_mask
+
+ @pytest.fixture
+ def geometry(self, channel_id, yaw_flip):
+ """Get expected geometry."""
+ shapes = {
+ 1: {"width": 10847, "height": 10810},
+ 2: {"width": 2712, "height": 2702}
+ }
+ return {
+ "nadir_row": 0 if yaw_flip else 1,
+ "projection_longitude": -1 if yaw_flip else 1,
+ "shape": shapes[channel_id]
+ }
+
+ @pytest.fixture
+ def expected(self, geometry, earth_mask, yaw_flip):
+ """Define expected metadata."""
+ proj_dict = {
+ 'a': '6378169',
+ 'h': '35785831',
+ 'lon_0': '0',
+ 'no_defs': 'None',
+ 'proj': 'geos',
+ 'rf': '295.488065897001',
+ 'type': 'crs',
+ 'units': 'm',
+ 'x_0': '0',
+ 'y_0': '0'
+ }
+ area = AreaDefinition(
+ area_id="goes_geos_uniform",
+ proj_id="goes_geos_uniform",
+ description="GOES-15 geostationary projection (uniform sampling)",
+ projection=proj_dict,
+ area_extent=(-5434201.1352, -5415668.5992, 5434201.1352, 5415668.5992),
+ **geometry["shape"]
+ )
+ return {
+ "area_def_uni": area,
+ "earth_mask": earth_mask,
+ "yaw_flip": yaw_flip,
+ "lon0": 0,
+ "lat0": geometry["projection_longitude"],
+ "nadir_row": geometry["nadir_row"],
+ "nadir_col": 1
+ }
+
+ @pytest.fixture
+ def mocked_file_handler(self, dataset):
+ """Mock file handler to load the given fake dataset."""
+ from satpy.readers.goes_imager_nc import FULL_DISC, GOESNCFileHandler
+ with mock.patch("satpy.readers.goes_imager_nc.xr") as xr_:
+ xr_.open_dataset.return_value = dataset
+ GOESNCFileHandler.vis_sectors[(3, 4)] = FULL_DISC
+ GOESNCFileHandler.ir_sectors[(3, 4)] = FULL_DISC
+ GOESNCFileHandler.yaw_flip_sampling_distance = 1
+ return GOESNCFileHandler(
+ filename='dummy',
+ filename_info={},
+ filetype_info={},
+ )
+
+ def test_metadata(self, mocked_file_handler, expected):
+ """Test dataset metadata."""
+ metadata = mocked_file_handler.meta
+ self._assert_earth_mask_equal(metadata, expected)
+ assert metadata == expected
+
+ def _assert_earth_mask_equal(self, metadata, expected):
+ earth_mask_tst = metadata.pop("earth_mask")
+ earth_mask_ref = expected.pop("earth_mask")
+ xr.testing.assert_allclose(earth_mask_tst, earth_mask_ref)
+
+
class GOESNCFileHandlerTest(unittest.TestCase):
"""Test the file handler."""
@@ -223,15 +350,15 @@ class GOESNCFileHandlerTest(unittest.TestCase):
@mock.patch('satpy.readers.goes_imager_nc.xr')
def setUp(self, xr_):
"""Set up the tests."""
- from satpy.readers.goes_imager_nc import GOESNCFileHandler, CALIB_COEFS
+ from satpy.readers.goes_imager_nc import CALIB_COEFS, GOESNCFileHandler
self.coefs = CALIB_COEFS['GOES-15']
self.all_coefs = CALIB_COEFS
self.channels = sorted(self.coefs.keys())
self.ir_channels = sorted([ch for ch in self.channels
- if not GOESNCFileHandler._is_vis(ch)])
+ if not is_vis_channel(ch)])
self.vis_channels = sorted([ch for ch in self.channels
- if GOESNCFileHandler._is_vis(ch)])
+ if is_vis_channel(ch)])
# Mock file access to return a fake dataset. Choose a medium count value
# (100) to avoid elements being masked due to invalid
@@ -244,7 +371,7 @@ def setUp(self, xr_):
xr_.open_dataset.return_value = xr.Dataset(
{'data': xr.DataArray(data=self.counts, dims=('time', 'yc', 'xc')),
- 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')),
+ 'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')),
'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc')),
'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'),
dims=('time',)),
@@ -281,9 +408,6 @@ def test_get_dataset_counts(self):
'projection_latitude': 0.0,
'projection_altitude': ALTITUDE,
'yaw_flip': True},
- 'satellite_longitude': -75.0,
- 'satellite_latitude': 0.0,
- 'satellite_altitude': ALTITUDE,
'platform_name': 'GOES-15',
'sensor': 'goes_imager',
'sector': UNKNOWN_SECTOR,
@@ -345,7 +469,7 @@ def test_get_dataset_invalid(self):
def test_calibrate(self):
"""Test whether the correct calibration methods are called."""
for ch in self.channels:
- if self.reader._is_vis(ch):
+ if is_vis_channel(ch):
calibs = {'radiance': '_viscounts2radiance',
'reflectance': '_calibrate_vis'}
else:
@@ -359,9 +483,14 @@ def test_calibrate(self):
def test_get_sector(self):
"""Test sector identification."""
- from satpy.readers.goes_imager_nc import (FULL_DISC, NORTH_HEMIS_EAST,
- SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST,
- SOUTH_HEMIS_WEST, UNKNOWN_SECTOR)
+ from satpy.readers.goes_imager_nc import (
+ FULL_DISC,
+ NORTH_HEMIS_EAST,
+ NORTH_HEMIS_WEST,
+ SOUTH_HEMIS_EAST,
+ SOUTH_HEMIS_WEST,
+ UNKNOWN_SECTOR,
+ )
shapes_vis = {
(10800, 20754): FULL_DISC,
(7286, 13900): NORTH_HEMIS_EAST,
@@ -391,138 +520,23 @@ def test_get_sector(self):
msg='Incorrect sector identification')
-class GOESNCEUMFileHandlerRadianceTest(unittest.TestCase):
- """Tests for the radiances."""
-
- longMessage = True
-
- @mock.patch('satpy.readers.goes_imager_nc.xr')
- def setUp(self, xr_):
- """Set up the tests."""
- from satpy.readers.goes_imager_nc import GOESEUMNCFileHandler, CALIB_COEFS
-
- self.coefs = CALIB_COEFS['GOES-15']
- self.all_coefs = CALIB_COEFS
- self.channels = sorted(self.coefs.keys())
- self.ir_channels = sorted([ch for ch in self.channels
- if not GOESEUMNCFileHandler._is_vis(ch)])
- self.vis_channels = sorted([ch for ch in self.channels
- if GOESEUMNCFileHandler._is_vis(ch)])
-
- # Mock file access to return a fake dataset.
- nrows = ncols = 300
- self.radiance = np.ones((1, nrows, ncols)) # IR channels
- self.lon = np.zeros((nrows, ncols)) # Dummy
- self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape(
- nrows, ncols) # Includes invalid values to be masked
-
- xr_.open_dataset.return_value = xr.Dataset(
- {'data': xr.DataArray(data=self.radiance, dims=('time', 'yc', 'xc')),
- 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'),
- dims=('time',)),
- 'bands': xr.DataArray(data=np.array([1]))},
- attrs={'Satellite Sensor': 'G-15'})
-
- geo_data = xr.Dataset(
- {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')),
- 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))},
- attrs={'Satellite Sensor': 'G-15'})
-
- # Instantiate reader using the mocked open_dataset() method
- self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={},
- filetype_info={}, geo_data=geo_data)
-
- def test_get_dataset_radiance(self):
- """Test getting the radiances."""
- for ch in self.channels:
- if not self.reader._is_vis(ch):
- radiance = self.reader.get_dataset(
- key=make_dataid(name=ch, calibration='radiance'), info={})
- # ... this only compares the valid (unmasked) elements
- self.assertTrue(np.all(self.radiance == radiance.to_masked_array()),
- msg='get_dataset() returns invalid radiance for '
- 'channel {}'.format(ch))
-
- def test_calibrate(self):
- """Test whether the correct calibration methods are called."""
- for ch in self.channels:
- if not self.reader._is_vis(ch):
- calibs = {'brightness_temperature': '_calibrate_ir'}
- for calib, method in calibs.items():
- with mock.patch.object(self.reader, method) as target_func:
- self.reader.calibrate(data=self.reader.nc['data'],
- calibration=calib, channel=ch)
- target_func.assert_called()
-
- def test_get_sector(self):
- """Test sector identification."""
- from satpy.readers.goes_imager_nc import (FULL_DISC, NORTH_HEMIS_EAST,
- SOUTH_HEMIS_EAST, NORTH_HEMIS_WEST,
- SOUTH_HEMIS_WEST, UNKNOWN_SECTOR)
- shapes = {
- (2700, 5200): FULL_DISC,
- (1850, 3450): NORTH_HEMIS_EAST,
- (600, 3500): SOUTH_HEMIS_EAST,
- (1310, 3300): NORTH_HEMIS_WEST,
- (1099, 2800): SOUTH_HEMIS_WEST,
- (123, 456): UNKNOWN_SECTOR
- }
- for (nlines, ncols), sector_ref in shapes.items():
- for channel in ('00_7', '10_7'):
- sector = self.reader._get_sector(channel=channel, nlines=nlines,
- ncols=ncols)
- self.assertEqual(sector, sector_ref,
- msg='Incorrect sector identification')
-
-
-class GOESNCEUMFileHandlerReflectanceTest(unittest.TestCase):
- """Testing the reflectances."""
-
- longMessage = True
-
- @mock.patch('satpy.readers.goes_imager_nc.xr')
- def setUp(self, xr_):
- """Set up the tests."""
- from satpy.readers.goes_imager_nc import GOESEUMNCFileHandler, CALIB_COEFS
-
- self.coefs = CALIB_COEFS['GOES-15']
- self.all_coefs = CALIB_COEFS
- self.channels = sorted(self.coefs.keys())
- self.ir_channels = sorted([ch for ch in self.channels
- if not GOESEUMNCFileHandler._is_vis(ch)])
- self.vis_channels = sorted([ch for ch in self.channels
- if GOESEUMNCFileHandler._is_vis(ch)])
-
- # Mock file access to return a fake dataset.
- nrows = ncols = 300
- self.reflectance = 50 * np.ones((1, nrows, ncols)) # Vis channel
- self.lon = np.zeros((nrows, ncols)) # Dummy
- self.lat = np.repeat(np.linspace(-150, 150, nrows), ncols).reshape(
- nrows, ncols) # Includes invalid values to be masked
-
- xr_.open_dataset.return_value = xr.Dataset(
- {'data': xr.DataArray(data=self.reflectance, dims=('time', 'yc', 'xc')),
- 'time': xr.DataArray(data=np.array([0], dtype='datetime64[ms]'),
- dims=('time',)),
- 'bands': xr.DataArray(data=np.array([1]))},
- attrs={'Satellite Sensor': 'G-15'})
-
- geo_data = xr.Dataset(
- {'lon': xr.DataArray(data=self.lon, dims=('yc', 'xc')),
- 'lat': xr.DataArray(data=self.lat, dims=('yc', 'xc'))},
- attrs={'Satellite Sensor': 'G-15'})
-
- # Instantiate reader using the mocked open_dataset() method
- self.reader = GOESEUMNCFileHandler(filename='dummy', filename_info={},
- filetype_info={}, geo_data=geo_data)
-
- def test_get_dataset_reflectance(self):
- """Test getting the reflectance."""
- for ch in self.channels:
- if self.reader._is_vis(ch):
- refl = self.reader.get_dataset(
- key=make_dataid(name=ch, calibration='reflectance'), info={})
- # ... this only compares the valid (unmasked) elements
- self.assertTrue(np.all(self.reflectance == refl.to_masked_array()),
- msg='get_dataset() returns invalid reflectance for '
- 'channel {}'.format(ch))
+class TestChannelIdentification:
+ """Test identification of channel type."""
+
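+ # Channels may be identified by name ("00_7") or by number; channel 1 is the visible band.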
+ @pytest.mark.parametrize(
+ "channel_name,expected",
+ [
+ ("00_7", True),
+ ("10_7", False),
+ (1, True),
+ (2, False)
+ ]
+ )
+ def test_is_vis_channel(self, channel_name, expected):
+ """Test vis channel identification."""
+ assert is_vis_channel(channel_name) == expected
+
+ def test_invalid_channel(self):
+ """Test handling of invalid channel type."""
+ with pytest.raises(ValueError):
+ is_vis_channel({"foo": "bar"})
diff --git a/satpy/tests/reader_tests/test_gpm_imerg.py b/satpy/tests/reader_tests/test_gpm_imerg.py
index cb93eda520..a91f6d300f 100644
--- a/satpy/tests/reader_tests/test_gpm_imerg.py
+++ b/satpy/tests/reader_tests/test_gpm_imerg.py
@@ -18,14 +18,16 @@
import os
-import numpy as np
-import xarray as xr
-import dask.array as da
-from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
-from datetime import datetime
import unittest
+from datetime import datetime
from unittest import mock
+import dask.array as da
+import h5py
+import numpy as np
+import xarray as xr
+
+from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
DEFAULT_FILE_SHAPE = (3600, 1800)
DEFAULT_LAT_DATA = np.linspace(-89.95, 89.95,
@@ -54,12 +56,14 @@ def _get_precip_data(self, num_rows, num_cols):
selection = {
'Grid/IRprecipitation':
xr.DataArray(
- da.ones((1, num_rows, num_cols), chunks=1024,
+ da.ones((1, num_cols, num_rows), chunks=1024,
dtype=np.float32),
attrs={
'_FillValue': -9999.9,
'units': 'mm/hr',
'Units': 'mm/hr',
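+ # HDF5 reference attributes, presumably included to exercise handling of non-numeric attrs.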
+ 'badval': h5py.h5r.Reference(),
+ 'badvals': np.array([[h5py.h5r.Reference()]])
},
dims=('time', 'lon', 'lat')),
}
@@ -87,8 +91,8 @@ class TestHdf5IMERG(unittest.TestCase):
def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
- from satpy.readers.gpm_imerg import Hdf5IMERG
from satpy._config import config_search_paths
+ from satpy.readers.gpm_imerg import Hdf5IMERG
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(Hdf5IMERG, '__bases__', (FakeHDF5FileHandler2,))
@@ -102,6 +106,7 @@ def tearDown(self):
def test_load_data(self):
"""Test loading data."""
from satpy.readers import load_reader
+
# Filename to test, needed for start and end times
filenames = [
'3B-HHR.MS.MRG.3IMERG.20200131-S233000-E235959.1410.V06B.HDF5', ]
diff --git a/satpy/tests/reader_tests/test_grib.py b/satpy/tests/reader_tests/test_grib.py
index 3f259ddf62..b349e91169 100644
--- a/satpy/tests/reader_tests/test_grib.py
+++ b/satpy/tests/reader_tests/test_grib.py
@@ -22,8 +22,8 @@
from unittest import mock
import numpy as np
-import xarray as xr
import pytest
+import xarray as xr
from satpy.dataset import DataQuery
@@ -198,7 +198,6 @@ def __enter__(self):
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exit."""
- pass
class TestGRIBReader:
diff --git a/satpy/tests/reader_tests/test_hdf4_utils.py b/satpy/tests/reader_tests/test_hdf4_utils.py
index 0204e738ad..9a0773c2c1 100644
--- a/satpy/tests/reader_tests/test_hdf4_utils.py
+++ b/satpy/tests/reader_tests/test_hdf4_utils.py
@@ -19,6 +19,7 @@
import os
import unittest
+
import numpy as np
import xarray as xr
@@ -26,7 +27,7 @@
from satpy.readers.hdf4_utils import HDF4FileHandler
except ImportError:
# fake the import so we can at least run the tests in this file
- HDF4FileHandler = object
+ HDF4FileHandler = object # type: ignore
class FakeHDF4FileHandler(HDF4FileHandler):
diff --git a/satpy/tests/reader_tests/test_hdf5_utils.py b/satpy/tests/reader_tests/test_hdf5_utils.py
index 1724852a7b..2c5fd2d19a 100644
--- a/satpy/tests/reader_tests/test_hdf5_utils.py
+++ b/satpy/tests/reader_tests/test_hdf5_utils.py
@@ -19,13 +19,14 @@
import os
import unittest
+
import numpy as np
try:
from satpy.readers.hdf5_utils import HDF5FileHandler
except ImportError:
# fake the import so we can at least run the tests in this file
- HDF5FileHandler = object
+ HDF5FileHandler = object # type: ignore
class FakeHDF5FileHandler(HDF5FileHandler):
@@ -116,8 +117,9 @@ def tearDown(self):
def test_all_basic(self):
"""Test everything about the HDF5 class."""
- from satpy.readers.hdf5_utils import HDF5FileHandler
import xarray as xr
+
+ from satpy.readers.hdf5_utils import HDF5FileHandler
file_handler = HDF5FileHandler('test.h5', {}, {})
for ds_name in ('test_group/ds1_f', 'test_group/ds1_i', 'ds2_f', 'ds2_i'):
diff --git a/satpy/tests/reader_tests/test_hdfeos_base.py b/satpy/tests/reader_tests/test_hdfeos_base.py
index c0d706114c..68b8928f2e 100644
--- a/satpy/tests/reader_tests/test_hdfeos_base.py
+++ b/satpy/tests/reader_tests/test_hdfeos_base.py
@@ -19,7 +19,6 @@
import unittest
-
nrt_mda = '''GROUP = INVENTORYMETADATA
GROUPTYPE = MASTERGROUP
diff --git a/satpy/tests/reader_tests/test_hrit_base.py b/satpy/tests/reader_tests/test_hrit_base.py
index 187f37160e..e99d992f0e 100644
--- a/satpy/tests/reader_tests/test_hrit_base.py
+++ b/satpy/tests/reader_tests/test_hrit_base.py
@@ -17,15 +17,19 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The HRIT base reader tests package."""
+import bz2
+import gzip
import os
import unittest
-from unittest import mock
from datetime import datetime
-from tempfile import gettempdir, NamedTemporaryFile
+from tempfile import NamedTemporaryFile, gettempdir
+from unittest import mock
import numpy as np
+import pytest
-from satpy.readers.hrit_base import HRITFileHandler, get_xritdecompress_cmd, get_xritdecompress_outfile, decompress
+from satpy.readers import FSFile
+from satpy.readers.hrit_base import HRITFileHandler, decompress, get_xritdecompress_cmd, get_xritdecompress_outfile
class TestHRITDecompress(unittest.TestCase):
@@ -79,59 +83,135 @@ def test_decompress(self, popen):
self.assertEqual(res, os.path.join('.', 'bla.__'))
-class TestHRITFileHandler(unittest.TestCase):
+# From a compressed msg hrit file.
+# uncompressed data field length 17223680
+# compressed data field length 1578312
+mda = {'file_type': 0, 'total_header_length': 6198, 'data_field_length': 17223680, 'number_of_bits_per_pixel': 10,
+ 'number_of_columns': 3712, 'number_of_lines': 464, 'compression_flag_for_data': 0,
+ 'projection_name': b'GEOS(+000.0) ',
+ 'cfac': -13642337, 'lfac': -13642337, 'coff': 1856, 'loff': 1856,
+ 'annotation_header': b'H-000-MSG4__-MSG4________-VIS006___-000001___-202208180730-C_',
+ 'cds_p_field': 64, 'timestamp': (23605, 27911151), 'GP_SC_ID': 324,
+ 'spectral_channel_id': 1,
+ 'segment_sequence_number': 1, 'planned_start_segment_number': 1, 'planned_end_segment_number': 8,
+ 'data_field_representation': 3,
+ 'image_segment_line_quality': np.array([(1, (0, 0), 1, 1, 0)] * 464,
+ dtype=[('line_number_in_grid', '>i4'),
+ ('line_mean_acquisition', [('days', '>u2'),
+ ('milliseconds', '>u4')]),
+ ('line_validity', 'u1'),
+ ('line_radiometric_quality', 'u1'),
+ ('line_geometric_quality', 'u1')]),
+ 'projection_parameters': {'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0, 'SSP_longitude': 0.0},
+ 'orbital_parameters': {}}
+
+mda_compressed = mda.copy()
+mda_compressed["data_field_length"] = 1578312
+mda_compressed['compression_flag_for_data'] = 1
+
+
+def new_get_hd(instance, hdr_info):
+ """Generate some metadata."""
+ if os.fspath(instance.filename).endswith(".C_"):
+ instance.mda = mda_compressed.copy()
+ else:
+ instance.mda = mda.copy()
+
+
+def new_get_hd_compressed(instance, hdr_info):
+ """Generate some metadata."""
+ instance.mda = mda.copy()
+ instance.mda['compression_flag_for_data'] = 1
+ instance.mda['data_field_length'] = 1578312
+
+
+@pytest.fixture
+def stub_hrit_file(tmp_path):
+ """Create a stub hrit file."""
+ filename = tmp_path / "some_hrit_file"
+ create_stub_hrit(filename)
+ return filename
+
+
+def create_stub_hrit(filename, open_fun=open, meta=mda):
+ """Create a stub hrit file."""
+ nbits = meta['number_of_bits_per_pixel']
+ lines = meta['number_of_lines']
+ cols = meta['number_of_columns']
+ total_bits = lines * cols * nbits
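+ # The stub is a blank header of total_header_length bytes followed by total_bits / 8 bytes of payload.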
+ arr = np.random.randint(0, 256,
+ size=int(total_bits / 8),
+ dtype=np.uint8)
+ with open_fun(filename, mode="wb") as fd:
+ fd.write(b" " * meta['total_header_length'])
+ bytes_data = arr.tobytes()
+ fd.write(bytes_data)
+ return filename
+
+
+@pytest.fixture
+def stub_bzipped_hrit_file(tmp_path):
+ """Create a stub bzipped hrit file."""
+ filename = tmp_path / "some_hrit_file.bz2"
+ create_stub_hrit(filename, open_fun=bz2.open)
+ return filename
+
+
+@pytest.fixture
+def stub_gzipped_hrit_file(tmp_path):
+ """Create a stub gzipped hrit file."""
+ filename = tmp_path / "some_hrit_file.gz"
+ create_stub_hrit(filename, open_fun=gzip.open)
+ return filename
+
+
+@pytest.fixture
+def stub_compressed_hrit_file(tmp_path):
+ """Create a stub compressed hrit file."""
+ filename = tmp_path / "some_hrit_file.C_"
+ create_stub_hrit(filename, meta=mda_compressed)
+ return filename
+
+
+class TestHRITFileHandler:
"""Test the HRITFileHandler."""
- @mock.patch('satpy.readers.hrit_base.np.fromfile')
- def setUp(self, fromfile):
+ def setup_method(self, method):
"""Set up the hrit file handler for testing."""
- m = mock.mock_open()
- fromfile.return_value = np.array([(1, 2)], dtype=[('total_header_length', int),
- ('hdr_id', int)])
+ del method
- with mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen:
- newopen.return_value.__enter__.return_value.tell.return_value = 1
+ with mock.patch.object(HRITFileHandler, '_get_hd', new=new_get_hd):
self.reader = HRITFileHandler('filename',
{'platform_shortname': 'MSG3',
'start_time': datetime(2016, 3, 3, 0, 0)},
{'filetype': 'info'},
[mock.MagicMock(), mock.MagicMock(),
mock.MagicMock()])
- ncols = 3712
- nlines = 464
- nbits = 10
- self.reader.mda['number_of_bits_per_pixel'] = nbits
- self.reader.mda['number_of_lines'] = nlines
- self.reader.mda['number_of_columns'] = ncols
- self.reader.mda['data_field_length'] = nlines * ncols * nbits
+
self.reader.mda['cfac'] = 5
self.reader.mda['lfac'] = 5
self.reader.mda['coff'] = 10
self.reader.mda['loff'] = 10
- self.reader.mda['projection_parameters'] = {}
- self.reader.mda['projection_parameters']['a'] = 6378169.0
- self.reader.mda['projection_parameters']['b'] = 6356583.8
- self.reader.mda['projection_parameters']['h'] = 35785831.0
self.reader.mda['projection_parameters']['SSP_longitude'] = 44
def test_get_xy_from_linecol(self):
"""Test get_xy_from_linecol."""
x__, y__ = self.reader.get_xy_from_linecol(0, 0, (10, 10), (5, 5))
- self.assertEqual(-131072, x__)
- self.assertEqual(-131072, y__)
+ assert x__ == -131072
+ assert y__ == -131072
x__, y__ = self.reader.get_xy_from_linecol(10, 10, (10, 10), (5, 5))
- self.assertEqual(0, x__)
- self.assertEqual(0, y__)
+ assert x__ == 0
+ assert y__ == 0
x__, y__ = self.reader.get_xy_from_linecol(20, 20, (10, 10), (5, 5))
- self.assertEqual(131072, x__)
- self.assertEqual(131072, y__)
+ assert x__ == 131072
+ assert y__ == 131072
def test_get_area_extent(self):
"""Test getting the area extent."""
res = self.reader.get_area_extent((20, 20), (10, 10), (5, 5), 33)
exp = (-71717.44995740513, -71717.44995740513,
79266.655216079365, 79266.655216079365)
- self.assertTupleEqual(res, exp)
+ assert res == exp
def test_get_area_def(self):
"""Test getting an area definition."""
@@ -139,22 +219,76 @@ def test_get_area_def(self):
area = self.reader.get_area_def('VIS06')
proj_dict = area.proj_dict
a, b = proj4_radius_parameters(proj_dict)
- self.assertEqual(a, 6378169.0)
- self.assertEqual(b, 6356583.8)
- self.assertEqual(proj_dict['h'], 35785831.0)
- self.assertEqual(proj_dict['lon_0'], 44.0)
- self.assertEqual(proj_dict['proj'], 'geos')
- self.assertEqual(proj_dict['units'], 'm')
- self.assertEqual(area.area_extent,
- (-77771774058.38356, -77771774058.38356,
- 30310525626438.438, 3720765401003.719))
-
- @mock.patch('satpy.readers.hrit_base.np.memmap')
- def test_read_band(self, memmap):
- """Test reading a single band."""
- nbits = self.reader.mda['number_of_bits_per_pixel']
- memmap.return_value = np.random.randint(0, 256,
- size=int((464 * 3712 * nbits) / 8),
- dtype=np.uint8)
+ assert a == 6378169.0
+ assert b == 6356583.8
+ assert proj_dict['h'] == 35785831.0
+ assert proj_dict['lon_0'] == 44.0
+ assert proj_dict['proj'] == 'geos'
+ assert proj_dict['units'] == 'm'
+ assert area.area_extent == (-77771774058.38356, -77771774058.38356,
+ 30310525626438.438, 3720765401003.719)
+
+ def test_read_band_filepath(self, stub_hrit_file):
+ """Test reading a single band from a filepath."""
+ self.reader.filename = stub_hrit_file
+
+ res = self.reader.read_band('VIS006', None)
+ assert res.compute().shape == (464, 3712)
+
+ def test_read_band_FSFile(self, stub_hrit_file):
+ """Test reading a single band from an FSFile."""
+ import fsspec
+ filename = stub_hrit_file
+
+ fs_file = fsspec.open(filename)
+ self.reader.filename = FSFile(fs_file)
+
+ res = self.reader.read_band('VIS006', None)
+ assert res.compute().shape == (464, 3712)
+
+ def test_read_band_bzipped2_filepath(self, stub_bzipped_hrit_file):
+ """Test reading a single band from a bzipped file."""
+ self.reader.filename = stub_bzipped_hrit_file
+
+ res = self.reader.read_band('VIS006', None)
+ assert res.compute().shape == (464, 3712)
+
+ def test_read_band_gzip_stream(self, stub_gzipped_hrit_file):
+ """Test reading a single band from a gzip stream."""
+ import fsspec
+ filename = stub_gzipped_hrit_file
+
+ fs_file = fsspec.open(filename, compression="gzip")
+ self.reader.filename = FSFile(fs_file)
+
res = self.reader.read_band('VIS006', None)
- self.assertEqual(res.compute().shape, (464, 3712))
+ assert res.compute().shape == (464, 3712)
+
+
+def fake_decompress(infile, outdir='.'):
+ """Fake decompression."""
+ filename = os.fspath(infile)[:-3]
+ return create_stub_hrit(filename)
+
+
+class TestHRITFileHandlerCompressed:
+ """Test the HRITFileHandler with compressed segments."""
+
+ def test_read_band_filepath(self, stub_compressed_hrit_file):
+ """Test reading a single band from a filepath."""
+ filename = stub_compressed_hrit_file
+
+ with mock.patch("satpy.readers.hrit_base.decompress", side_effect=fake_decompress) as mock_decompress:
+ with mock.patch.object(HRITFileHandler, '_get_hd', side_effect=new_get_hd, autospec=True) as get_hd:
+ self.reader = HRITFileHandler(filename,
+ {'platform_shortname': 'MSG3',
+ 'start_time': datetime(2016, 3, 3, 0, 0)},
+ {'filetype': 'info'},
+ [mock.MagicMock(), mock.MagicMock(),
+ mock.MagicMock()])
+
+ res = self.reader.read_band('VIS006', None)
+ assert get_hd.call_count == 1
+ assert mock_decompress.call_count == 0
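+ # Decompression is lazy: it only runs once the dask array is actually computed.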
+ assert res.compute().shape == (464, 3712)
+ assert mock_decompress.call_count == 1
diff --git a/satpy/tests/reader_tests/test_hsaf_grib.py b/satpy/tests/reader_tests/test_hsaf_grib.py
index c0be432ef1..bc8a2c2c73 100644
--- a/satpy/tests/reader_tests/test_hsaf_grib.py
+++ b/satpy/tests/reader_tests/test_hsaf_grib.py
@@ -109,7 +109,6 @@ def __enter__(self):
def __exit__(self, exc_type, exc_val, exc_tb):
"""Exit the context."""
- pass
class TestHSAFFileHandler(unittest.TestCase):
@@ -156,6 +155,7 @@ def test_get_dataset(self, pg):
"""Test reading the actual datasets from a grib file."""
pg.open.return_value = FakeGRIB()
from satpy.readers.hsaf_grib import HSAFFileHandler
+
# Instantaneous precipitation
fh = HSAFFileHandler('filename', mock.MagicMock(), mock.MagicMock())
fh.filename = "H03B"
diff --git a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
index 1bb4368c6e..b2a5d4d3e1 100644
--- a/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
+++ b/satpy/tests/reader_tests/test_hy2_scat_l2b_h5.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2020 Satpy developers
+# Copyright (c) 2020, 2021 Satpy developers
#
# This file is part of satpy.
#
@@ -18,13 +18,14 @@
"""Module for testing the satpy.readers.hy2_scat_l2b_h5 module."""
import os
+import unittest
+from unittest import mock
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
-from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
-import unittest
-from unittest import mock
+from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
@@ -39,6 +40,13 @@
class FakeHDF5FileHandler2(FakeHDF5FileHandler):
"""Swap-in HDF5 File Handler."""
+ def __getitem__(self, key):
+ """Return copy of dataarray to prevent manipulating attributes in the original."""
+ val = self.file_content[key]
+ if isinstance(val, xr.core.dataarray.DataArray):
+ val = val.copy()
+ return val
+
def _get_geo_data(self, num_rows, num_cols):
geo = {
'wvc_lon':
@@ -68,6 +76,35 @@ def _get_geo_data(self, num_rows, num_cols):
}
return geo
+ def _get_geo_data_nsoas(self, num_rows, num_cols):
+ geo = {
+ 'wvc_lon':
+ xr.DataArray(
+ da.ones((num_rows, num_cols), chunks=1024,
+ dtype=np.float32),
+ attrs={
+ 'fill_value': 1.7e+38,
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ 'units': 'degree',
+ 'valid_range': [0, 359.99],
+ },
+ dims=('y', 'x')),
+ 'wvc_lat':
+ xr.DataArray(
+ da.ones((num_rows, num_cols), chunks=1024,
+ dtype=np.float32),
+ attrs={
+ 'fill_value': 1.7e+38,
+ 'scale_factor': 1.,
+ 'add_offset': 0.,
+ 'units': 'degree',
+ 'valid_range': [-90.0, 90.0],
+ },
+ dims=('y', 'x')),
+ }
+ return geo
+
def _get_selection_data(self, num_rows, num_cols):
selection = {
'wvc_selection':
@@ -267,12 +304,8 @@ def _get_wvc_row_time(self, num_rows):
}
return wvc_row_time
- def get_test_content(self, filename, filename_info, filetype_info):
- """Mimic reader input file content."""
- num_rows = 300
- num_cols = 10
- num_amb = 8
- global_attrs = {
+ def _get_global_attrs(self, num_rows, num_cols):
+ return {
'/attr/Equator_Crossing_Longitude': '246.408397',
'/attr/Equator_Crossing_Time': '20200326T01:37:15.875',
'/attr/HDF_Version_Id': 'HDF5-1.8.16',
@@ -286,7 +319,6 @@ def get_test_content(self, filename, filename_info, filetype_info):
'used as background winds in the CMF'),
'/attr/L2B_Data_Version': '10',
'/attr/L2B_Expected_WVC_Rows': np.int32(num_rows),
- '/attr/L2B_Number_WVC_cells': np.int32(num_cols),
'/attr/L2B_Processing_Type': 'OPER',
'/attr/L2B_Processor_Name': 'hy2_sca_l2b_pro',
'/attr/L2B_Processor_Version': '01.00',
@@ -308,10 +340,22 @@ def get_test_content(self, filename, filename_info, filetype_info):
'/attr/WVC_Size': '25000m*25000m',
}
+ def get_test_content(self, filename, filename_info, filetype_info):
+ """Mimic reader input file content."""
+ num_rows = 300
+ num_cols = 10
+ num_amb = 8
+
test_content = {}
- test_content.update(global_attrs)
+ test_content.update(self._get_global_attrs(num_rows, num_cols))
data = {}
- data = self._get_geo_data(num_rows, num_cols)
+ if 'OPER_SCA_L2B' in filename:
+ test_content.update({'/attr/L2B_Expected_WVC_Cells': np.int32(num_cols)})
+ data = self._get_geo_data_nsoas(num_rows, num_cols)
+ else:
+ test_content.update({'/attr/L2B_Number_WVC_cells': np.int32(num_cols)})
+ data = self._get_geo_data(num_rows, num_cols)
+
test_content.update(data)
data = self._get_selection_data(num_rows, num_cols)
test_content.update(data)
@@ -331,8 +375,8 @@ class TestHY2SCATL2BH5Reader(unittest.TestCase):
def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
- from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler
from satpy._config import config_search_paths
+ from satpy.readers.hy2_scat_l2b_h5 import HY2SCATL2BH5FileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(HY2SCATL2BH5FileHandler, '__bases__', (FakeHDF5FileHandler2,))
@@ -359,6 +403,22 @@ def test_load_geo(self):
res = reader.load(['wvc_lon', 'wvc_lat'])
self.assertEqual(2, len(res))
+ def test_load_geo_nsoas(self):
+ """Test loading data from nsoas file."""
+ from satpy.readers import load_reader
+ filenames = [
+ 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ]
+
+ reader = load_reader(self.reader_configs)
+ files = reader.select_files_from_pathnames(filenames)
+ self.assertEqual(1, len(files))
+ reader.create_filehandlers(files)
+ # Make sure we have some files
+ self.assertTrue(reader.file_handlers)
+
+ res = reader.load(['wvc_lon', 'wvc_lat'])
+ self.assertEqual(2, len(res))
+
def test_load_data_selection(self):
"""Test loading data."""
from satpy.readers import load_reader
@@ -415,3 +475,50 @@ def test_load_data_row_times(self):
self.assertTrue(reader.file_handlers)
res = reader.load(['wvc_row_time'])
self.assertEqual(1, len(res))
+
+ def test_reading_attrs(self):
+ """Test loading data."""
+ from satpy.readers import load_reader
+ filenames = [
+ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ]
+
+ reader = load_reader(self.reader_configs)
+ files = reader.select_files_from_pathnames(filenames)
+ reader.create_filehandlers(files)
+ # Load a dataset to get at the global attributes
+ res = reader.load(['wvc_lon'])
+ self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10)
+ with self.assertRaises(KeyError):
+ self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10)
+
+ def test_reading_attrs_nsoas(self):
+ """Test loading data."""
+ from satpy.readers import load_reader
+ filenames = [
+ 'H2B_OPER_SCA_L2B_OR_20210803T100304_20210803T104601_13905_pwp_250_07_owv.h5', ]
+
+ reader = load_reader(self.reader_configs)
+ files = reader.select_files_from_pathnames(filenames)
+ reader.create_filehandlers(files)
+ # Load a dataset to get at the global attributes
+ res = reader.load(['wvc_lon'])
+ with self.assertRaises(KeyError):
+ self.assertEqual(res['wvc_lon'].attrs['L2B_Number_WVC_cells'], 10)
+ self.assertEqual(res['wvc_lon'].attrs['L2B_Expected_WVC_Cells'], 10)
+
+ def test_properties(self):
+ """Test platform_name."""
+ from datetime import datetime
+
+ from satpy.readers import load_reader
+ filenames = [
+ 'W_XX-EUMETSAT-Darmstadt,SURFACE+SATELLITE,HY2B+SM_C_EUMP_20200326------_07077_o_250_l2b.h5', ]
+
+ reader = load_reader(self.reader_configs)
+ files = reader.select_files_from_pathnames(filenames)
+ reader.create_filehandlers(files)
+ # Load a dataset to get at the properties
+ res = reader.load(['wvc_lon'])
+ self.assertEqual(res['wvc_lon'].platform_name, 'HY-2B')
+ self.assertEqual(res['wvc_lon'].start_time, datetime(2020, 3, 26, 1, 11, 7))
+ self.assertEqual(res['wvc_lon'].end_time, datetime(2020, 3, 26, 2, 55, 40))
diff --git a/satpy/tests/reader_tests/test_iasi_l2.py b/satpy/tests/reader_tests/test_iasi_l2.py
index fc88f7168b..c36293fef2 100644
--- a/satpy/tests/reader_tests/test_iasi_l2.py
+++ b/satpy/tests/reader_tests/test_iasi_l2.py
@@ -133,8 +133,9 @@ class TestIasiL2(unittest.TestCase):
def setUp(self):
"""Create temporary data to test on."""
- import tempfile
import datetime as dt
+ import tempfile
+
from satpy.readers.iasi_l2 import IASIL2HDF5
self.base_dir = tempfile.mkdtemp()
@@ -165,10 +166,10 @@ def test_scene(self):
from satpy import Scene
fname = os.path.join(self.base_dir, FNAME)
scn = Scene(reader='iasi_l2', filenames=[fname])
- self.assertTrue('start_time' in scn.attrs)
- self.assertTrue('end_time' in scn.attrs)
- self.assertTrue('sensor' in scn.attrs)
- self.assertTrue('iasi' in scn.attrs['sensor'])
+ assert scn.start_time is not None
+ assert scn.end_time is not None
+ assert scn.sensor_names
+ assert 'iasi' in scn.sensor_names
def test_scene_load_available_datasets(self):
"""Test that all datasets are available."""
@@ -274,6 +275,7 @@ def check_sensing_times(self, times):
def test_read_dataset(self):
"""Test read_dataset() function."""
import h5py
+
from satpy.readers.iasi_l2 import read_dataset
from satpy.tests.utils import make_dataid
with h5py.File(self.fname, 'r') as fid:
@@ -291,6 +293,7 @@ def test_read_dataset(self):
def test_read_geo(self):
"""Test read_geo() function."""
import h5py
+
from satpy.readers.iasi_l2 import read_geo
from satpy.tests.utils import make_dataid
with h5py.File(self.fname, 'r') as fid:
diff --git a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py
index e31956a158..85df6b64ed 100644
--- a/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py
+++ b/satpy/tests/reader_tests/test_iasi_l2_so2_bufr.py
@@ -19,9 +19,10 @@
import os
import sys
-import numpy as np
import unittest
+import numpy as np
+
# TBD: this test is based on test_seviri_l2_bufr.py and test_iasi_l2.py
# This is a test IASI level 2 SO2 product message, take from a real
@@ -335,6 +336,7 @@ class TestIasiL2So2Bufr(unittest.TestCase):
def setUp(self):
"""Create temporary file to perform tests with."""
import tempfile
+
from satpy.readers.iasi_l2_so2_bufr import IASIL2SO2BUFR
self.base_dir = tempfile.mkdtemp()
@@ -361,10 +363,10 @@ def test_scene(self):
scn = Scene(reader='iasi_l2_so2_bufr', filenames=[fname])
- self.assertTrue('start_time' in scn.attrs)
- self.assertTrue('end_time' in scn.attrs)
- self.assertTrue('sensor' in scn.attrs)
- self.assertTrue('iasi' in scn.attrs['sensor'])
+ assert scn.start_time is not None
+ assert scn.end_time is not None
+ assert scn.sensor_names
+ assert 'iasi' in scn.sensor_names
@unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows")
def test_scene_load_available_datasets(self):
diff --git a/satpy/tests/reader_tests/test_ici_l1b_nc.py b/satpy/tests/reader_tests/test_ici_l1b_nc.py
new file mode 100644
index 0000000000..dd931486fd
--- /dev/null
+++ b/satpy/tests/reader_tests/test_ici_l1b_nc.py
@@ -0,0 +1,546 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2020 Satpy developers
+#
+# satpy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# satpy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The ici_l1b_nc reader tests package.
+
+This version tests the reader for ICI test data as per PFS V3A.
+
+"""
+
+from datetime import datetime
+from unittest.mock import patch
+
+import numpy as np
+import pytest
+import xarray as xr
+from netCDF4 import Dataset
+
+from satpy.readers.ici_l1b_nc import IciL1bNCFileHandler, InterpolationType
+
+N_CHANNELS = 13
+N_SCAN = 10
+N_SAMPLES = 784
+N_SUBS = 158
+N_HORNS = 7
+N_183 = 3
+
+
+@pytest.fixture
+def reader(fake_file):
+ """Return reader of ici level1b data."""
+ return IciL1bNCFileHandler(
+ filename=fake_file,
+ filename_info={
+ 'sensing_start_time': (
+ datetime.fromisoformat('2000-01-01T01:00:00')
+ ),
+ 'sensing_end_time': (
+ datetime.fromisoformat('2000-01-01T02:00:00')
+ ),
+ 'creation_time': (
+ datetime.fromisoformat('2000-01-01T03:00:00')
+ ),
+ },
+ filetype_info={
+ 'longitude': 'data/navigation_data/longitude',
+ 'latitude': 'data/navigation_data/latitude',
+ 'solar_azimuth': 'data/navigation_data/ici_solar_azimuth_angle',
+ 'solar_zenith': 'data/navigation_data/ici_solar_zenith_angle',
+ }
+ )
+
+
+@pytest.fixture
+def fake_file(tmp_path):
+ """Return file path to level1b file."""
+ file_path = tmp_path / 'test_file_ici_l1b_nc.nc'
+ writer = IciL1bFakeFileWriter(file_path)
+ writer.write()
+ yield file_path
+
+
+@pytest.fixture
+def dataset_info():
+ """Return dataset info."""
+ return {
+ 'name': '1',
+ 'file_type': 'nc_ici_l1b_rad',
+ 'file_key': 'data/measurement_data/ici_radiance_183',
+ 'coordinates': ['lat_pixels_horn_1', 'lon_pixels_horn_1'],
+ 'n_183': 0,
+ 'chan_index': 0,
+ 'calibration': 'brightness_temperature',
+ }
+
+
+class IciL1bFakeFileWriter:
+ """Writer class of fake ici level1b data."""
+
+ def __init__(self, file_path):
+ """Init."""
+ self.file_path = file_path
+
+ def write(self):
+ """Write fake data to file."""
+ with Dataset(self.file_path, 'w') as dataset:
+ self._write_attributes(dataset)
+ self._write_quality_group(dataset)
+ data_group = dataset.createGroup('data')
+ self._write_measurement_data_group(data_group)
+ self._write_navigation_data_group(data_group)
+
+ @staticmethod
+ def _write_attributes(dataset):
+ """Write attributes."""
+ dataset.sensing_start_time_utc = "2000-01-02 03:04:05.000"
+ dataset.sensing_end_time_utc = "2000-01-02 04:05:06.000"
+ dataset.instrument = "ICI"
+ dataset.spacecraft = "SGB"
+
+ @staticmethod
+ def _write_quality_group(dataset):
+ """Write the quality group."""
+ group = dataset.createGroup('quality')
+ group.overall_quality_flag = 0
+ duration_of_product = group.createVariable(
+ 'duration_of_product', "f4"
+ )
+ duration_of_product[:] = 1000.
+
+ @staticmethod
+ def _write_navigation_data_group(dataset):
+ """Write the navigation data group."""
+ group = dataset.createGroup('navigation_data')
+ group.createDimension('n_scan', N_SCAN)
+ group.createDimension('n_samples', N_SAMPLES)
+ group.createDimension('n_subs', N_SUBS)
+ group.createDimension('n_horns', N_HORNS)
+ subs = group.createVariable('n_subs', "i4", dimensions=('n_subs',))
+ subs[:] = np.arange(N_SUBS)
+ dimensions = ('n_scan', 'n_subs', 'n_horns')
+ shape = (N_SCAN, N_SUBS, N_HORNS)
+ longitude = group.createVariable(
+ 'longitude',
+ np.float32,
+ dimensions=dimensions,
+ )
+ longitude[:] = np.ones(shape)
+ latitude = group.createVariable(
+ 'latitude',
+ np.float32,
+ dimensions=dimensions,
+ )
+ latitude[:] = 2. * np.ones(shape)
+ azimuth = group.createVariable(
+ 'ici_solar_azimuth_angle',
+ np.float32,
+ dimensions=dimensions,
+ )
+ azimuth[:] = 3. * np.ones(shape)
+ zenith = group.createVariable(
+ 'ici_solar_zenith_angle',
+ np.float32,
+ dimensions=dimensions,
+ )
+ zenith[:] = 4. * np.ones(shape)
+ dimensions = ('n_scan', 'n_samples', 'n_horns')
+ shape = (N_SCAN, N_SAMPLES, N_HORNS)
+ delta_longitude = group.createVariable(
+ 'delta_longitude',
+ np.float32,
+ dimensions=dimensions,
+ )
+ delta_longitude[:] = 1000. * np.ones(shape)
+ delta_latitude = group.createVariable(
+ 'delta_latitude',
+ np.float32,
+ dimensions=dimensions,
+ )
+ delta_latitude[:] = 1000. * np.ones(shape)
+
+ @staticmethod
+ def _write_measurement_data_group(dataset):
+ """Write the measurement data group."""
+ group = dataset.createGroup('measurement_data')
+ group.createDimension('n_scan', N_SCAN)
+ group.createDimension('n_samples', N_SAMPLES)
+ group.createDimension('n_channels', N_CHANNELS)
+ group.createDimension('n_183', N_183)
+ scan = group.createVariable('n_scan', "i4", dimensions=('n_scan',))
+ scan[:] = np.arange(N_SCAN)
+ samples = group.createVariable(
+ 'n_samples', "i4", dimensions=('n_samples',)
+ )
+ samples[:] = np.arange(N_SAMPLES)
+ bt_a = group.createVariable(
+ 'bt_conversion_a', np.float32, dimensions=('n_channels',)
+ )
+ bt_a[:] = np.ones(N_CHANNELS)
+ bt_b = group.createVariable(
+ 'bt_conversion_b', np.float32, dimensions=('n_channels',)
+ )
+ bt_b[:] = np.zeros(N_CHANNELS)
+ cw = group.createVariable(
+ 'centre_wavenumber', np.float32, dimensions=('n_channels',)
+ )
+ cw[:] = np.array(
+ [6.0] * 3 + [8.0] * 2 + [11.0] * 3 + [15.0] * 3 + [22.0] * 2
+ )
+ ici_radiance_183 = group.createVariable(
+ 'ici_radiance_183',
+ np.float32,
+ dimensions=('n_scan', 'n_samples', 'n_183'),
+ )
+ ici_radiance_183[:] = 0.08 * np.ones((N_SCAN, N_SAMPLES, N_183))
+
+
+class TestIciL1bNCFileHandler:
+ """Test the IciL1bNCFileHandler reader."""
+
+ def test_start_time(self, reader):
+ """Test start time."""
+ assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5)
+
+ def test_end_time(self, reader):
+ """Test end time."""
+ assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6)
+
+ def test_sensor(self, reader):
+ """Test sensor."""
+ assert reader.sensor == "ICI"
+
+ def test_platform_name(self, reader):
+ """Test platform name."""
+ assert reader.platform_name == "SGB"
+
+ def test_ssp_lon(self, reader):
+ """Test sub satellite path longitude."""
+ assert reader.ssp_lon is None
+
+ def test_longitude(self, reader):
+ """Test longitude."""
+ np.testing.assert_allclose(reader.longitude, 1, rtol=1e-3)
+
+ def test_latitude(self, reader):
+ """Test latitude."""
+ np.testing.assert_allclose(reader.latitude, 2, rtol=1e-3)
+
+ def test_solar_azimuth(self, reader):
+ """Test solar azimuth."""
+ np.testing.assert_allclose(reader.solar_azimuth, 3, rtol=1e-3)
+
+ def test_solar_zenith(self, reader):
+ """Test solar zenith."""
+ np.testing.assert_allclose(reader.solar_zenith, 4, rtol=1e-3)
+
+ def test_calibrate_raises_for_unknown_calibration_method(self, reader):
+ """Test perform calibration raises for unknown calibration method."""
+ variable = xr.DataArray(np.ones(3))
+ dataset_info = {'calibration': 'unknown', 'name': 'radiance'}
+ with pytest.raises(ValueError, match='Unknown calibration'):
+ reader._calibrate(variable, dataset_info)
+
+ @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt')
+ def test_calibrate_does_not_call_calibrate_bt_if_not_needed(
+ self,
+ mocked_calibrate,
+ reader,
+ ):
+ """Test calibrate does not call calibrate_bt if not needed."""
+ variable = xr.DataArray(
+ np.array([
+ [0.060, 0.065, 0.070, 0.075],
+ [0.080, 0.085, 0.090, 0.095],
+ ]),
+ dims=('n_scan', 'n_samples'),
+ )
+ dataset_info = {'calibration': 'radiance'}
+ reader._calibrate(variable, dataset_info)
+ mocked_calibrate.assert_not_called()
+
+ @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt')
+ def test_calibrate_calls_calibrate_bt(
+ self,
+ mocked_calibrate_bt,
+ reader,
+ ):
+ """Test calibrate calls calibrate_bt."""
+ variable = xr.DataArray(
+ np.array([
+ [0.060, 0.065, 0.070, 0.075],
+ [0.080, 0.085, 0.090, 0.095],
+ ]),
+ dims=('n_scan', 'n_samples'),
+ )
+ dataset_info = {
+ 'calibration': 'brightness_temperature',
+ 'chan_index': 2,
+ }
+ reader._calibrate(variable, dataset_info)
+ mocked_calibrate_bt.assert_called_once_with(
+ variable,
+ 6.0,
+ 1.0,
+ 0.0,
+ )
+
+ def test_calibrate_bt(self, reader):
+ """Test calibrate brightness temperature."""
+ radiance = xr.DataArray(
+ np.array([
+ [0.060, 0.065, 0.070, 0.075],
+ [0.080, 0.085, 0.090, 0.095],
+ ])
+ )
+ cw = 6.1145
+ a = 1.
+ b = 0.0
+ bt = reader._calibrate_bt(radiance, cw, a, b)
+ expected_bt = np.array([
+ [198.22929022, 214.38700287, 230.54437184, 246.70146465],
+ [262.85833223, 279.01501371, 295.17153966, 311.32793429],
+ ])
+ np.testing.assert_allclose(bt, expected_bt)
+
+ @pytest.mark.parametrize('dims', (
+ ('n_scan', 'n_samples'),
+ ('x', 'y'),
+ ))
+ def test_standardize_dims(self, reader, dims):
+ """Test standardize dims."""
+ variable = xr.DataArray(
+ np.arange(6).reshape(2, 3),
+ dims=dims,
+ )
+ standardized = reader._standardize_dims(variable)
+ assert standardized.dims == ('y', 'x')
+
+ @pytest.mark.parametrize('dims,data_info,expect', (
+ (('y', 'x', 'n_horns'), {"n_horns": 1}, 1),
+ (('y', 'x', 'n_183'), {"n_183": 2}, 2),
+ ))
+ def test_filter_variable(self, reader, dims, data_info, expect):
+ """Test filter variable."""
+ data = np.arange(24).reshape(2, 3, 4)
+ variable = xr.DataArray(
+ data,
+ dims=dims,
+ )
+ filtered = reader._filter_variable(variable, data_info)
+ assert filtered.dims == ('y', 'x')
+ assert (filtered == data[:, :, expect]).all()
+
+ def test_drop_coords(self, reader):
+ """Test drop coordinates."""
+ coords = "dummy"
+ data = xr.DataArray(
+ np.ones(10),
+ dims=('y',),
+ coords={coords: 0},
+ )
+ assert coords in data.coords
+ data = reader._drop_coords(data)
+ assert coords not in data.coords
+
+ def test_get_third_dimension_name(self, reader):
+ """Test get third dimension name."""
+ data = xr.DataArray(np.ones((1, 1, 1)), dims=('x', 'y', 'z'))
+ assert reader._get_third_dimension_name(data) == 'z'
+
+ def test_get_third_dimension_name_return_none_for_2d_data(self, reader):
+ """Test get third dimension name return none for 2d data."""
+ data = xr.DataArray(np.ones((1, 1)), dims=('x', 'y'))
+ assert reader._get_third_dimension_name(data) is None
+
+ def test_get_dataset_return_none_if_data_not_exist(self, reader):
+ """Tes get dataset return none if data does not exist."""
+ dataset_id = {'name': 'unknown'}
+ dataset_info = {'file_key': 'non/existing/data'}
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+ assert dataset is None
+
+ @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._calibrate_bt')
+ def test_get_dataset_does_not_calibrate_if_not_desired(
+ self,
+ mocked_calibrate,
+ reader,
+ dataset_info,
+ ):
+ """Test get dataset does not calibrate if not desired."""
+ dataset_id = {'name': '1'}
+ dataset_info.pop('calibration')
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+ assert dataset.dims == ('y', 'x')
+ mocked_calibrate.assert_not_called()
+ assert isinstance(dataset, xr.DataArray)
+
+ def test_get_dataset_orthorectifies_if_orthorect_data_defined(
+ self,
+ reader,
+ ):
+ """Test get dataset orthorectifies if orthorect data is defined."""
+ dataset_id = {'name': 'lon_pixels_horn_1'}
+ dataset_info = {
+ 'name': 'lon_pixels_horn_1',
+ 'file_type': 'nc_ici_l1b_rad',
+ 'file_key': 'longitude',
+ 'orthorect_data': 'data/navigation_data/delta_longitude',
+ 'standard_name': 'longitude',
+ 'n_horns': 0,
+ 'modifiers': (),
+ }
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+ np.testing.assert_allclose(dataset, 1.009139, atol=1e-6)
+
+ def test_get_dataset_handles_calibration(
+ self,
+ reader,
+ dataset_info,
+ ):
+ """Test get dataset handles calibration."""
+ dataset_id = {'name': '1'}
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+ assert dataset.attrs["calibration"] == "brightness_temperature"
+ np.testing.assert_allclose(dataset, 272.73734)
+
+ def test_interpolate_returns_none_if_dataset_not_exist(self, reader):
+ """Test interpolate returns none if dataset not exist."""
+ azimuth, zenith = reader._interpolate(
+ InterpolationType.OBSERVATION_ANGLES
+ )
+ assert azimuth is None and zenith is None
+
+ @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_geo')
+ def test_interpolate_calls_interpolate_geo(self, mock, reader):
+ """Test interpolate calls interpolate_geo."""
+ reader._interpolate(InterpolationType.LONLAT)
+ mock.assert_called_once()
+
+ @patch('satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._interpolate_viewing_angle') # noqa: E501
+ def test_interpolate_calls_interpolate_viewing_angles(self, mock, reader):
+ """Test interpolate calls interpolate viewing_angles."""
+ reader._interpolate(InterpolationType.SOLAR_ANGLES)
+ mock.assert_called_once()
+
+ def test_interpolate_geo(self, reader):
+ """Test interpolate geographic coordinates."""
+ shape = (N_SCAN, N_SUBS, N_HORNS)
+ dims = ('n_scan', 'n_subs', 'n_horns')
+ longitude = xr.DataArray(
+ 2. * np.ones(shape),
+ dims=dims,
+ coords={
+ 'n_horns': np.arange(N_HORNS),
+ 'n_subs': np.arange(N_SUBS),
+ },
+ )
+ latitude = xr.DataArray(np.ones(shape), dims=dims)
+ lon, lat = reader._interpolate_geo(
+ longitude,
+ latitude,
+ N_SAMPLES,
+ )
+ expect_shape = (N_SCAN, N_SAMPLES, N_HORNS)
+ assert lon.shape == expect_shape
+ assert lat.shape == expect_shape
+ np.testing.assert_allclose(lon, 2.0)
+ np.testing.assert_allclose(lat, 1.0)
+
+ def test_interpolate_viewing_angle(self, reader):
+ """Test interpolate viewing angle."""
+ shape = (N_SCAN, N_SUBS, N_HORNS)
+ dims = ('n_scan', 'n_subs', 'n_horns')
+ azimuth = xr.DataArray(
+ np.ones(shape),
+ dims=dims,
+ coords={
+ 'n_horns': np.arange(N_HORNS),
+ 'n_subs': np.arange(N_SUBS),
+ },
+ )
+ zenith = xr.DataArray(100. * np.ones(shape), dims=dims)
+ azimuth, zenith = reader._interpolate_viewing_angle(
+ azimuth,
+ zenith,
+ N_SAMPLES,
+ )
+ expect_shape = (N_SCAN, N_SAMPLES, N_HORNS)
+ assert azimuth.shape == expect_shape
+ assert zenith.shape == expect_shape
+ np.testing.assert_allclose(azimuth, 1.0)
+ np.testing.assert_allclose(zenith, 100.0)
+
+ def test_orthorectify(self, reader):
+ """Test orthorectify."""
+ variable = xr.DataArray(
+ np.ones((N_SCAN, N_SAMPLES, N_HORNS)),
+ dims=('y', 'x', 'n_horns'),
+ coords={'n_horns': np.arange(N_HORNS)}
+ )
+ variable = variable.sel({'n_horns': 0})
+ orthorect_data_name = 'data/navigation_data/delta_longitude'
+ orthorectified = reader._orthorectify(
+ variable,
+ orthorect_data_name,
+ )
+ np.testing.assert_allclose(orthorectified, 1.009, rtol=1e-5)
+
+ def test_get_global_attributes(self, reader):
+ """Test get global attributes."""
+ attributes = reader._get_global_attributes()
+ assert attributes == {
+ 'filename': reader.filename,
+ 'start_time': datetime(2000, 1, 2, 3, 4, 5),
+ 'end_time': datetime(2000, 1, 2, 4, 5, 6),
+ 'spacecraft_name': 'SGB',
+ 'ssp_lon': None,
+ 'sensor': 'ICI',
+ 'filename_start_time': datetime(2000, 1, 1, 1, 0),
+ 'filename_end_time': datetime(2000, 1, 1, 2, 0),
+ 'platform_name': 'SGB',
+ 'quality_group': {
+ 'duration_of_product': np.array(1000., dtype=np.float32),
+ 'overall_quality_flag': 0,
+ }
+ }
+
+ def test_get_quality_attributes(self, reader):
+ """Test get quality attributes."""
+ attributes = reader._get_quality_attributes()
+ assert attributes == {
+ 'duration_of_product': np.array(1000., dtype=np.float32),
+ 'overall_quality_flag': 0,
+ }
+
+ @patch(
+ 'satpy.readers.ici_l1b_nc.IciL1bNCFileHandler._get_global_attributes',
+ return_value={"mocked_global_attributes": True},
+ )
+ def test_manage_attributes(self, mock, reader):
+ """Test manage attributes."""
+ variable = xr.DataArray(
+ np.ones(N_SCAN),
+ attrs={"season": "summer"},
+ )
+ dataset_info = {'name': 'ici_1', 'units': 'K'}
+ variable = reader._manage_attributes(variable, dataset_info)
+ assert variable.attrs == {
+ 'season': 'summer',
+ 'units': 'K',
+ 'name': 'ici_1',
+ 'mocked_global_attributes': True,
+ }
diff --git a/satpy/tests/reader_tests/test_mersi2_l1b.py b/satpy/tests/reader_tests/test_mersi2_l1b.py
index 47c11b46b3..240f338b85 100644
--- a/satpy/tests/reader_tests/test_mersi2_l1b.py
+++ b/satpy/tests/reader_tests/test_mersi2_l1b.py
@@ -18,12 +18,13 @@
"""Tests for the 'mersi2_l1b' reader."""
import os
import unittest
-import pytest
from unittest import mock
-import numpy as np
import dask.array as da
+import numpy as np
+import pytest
import xarray as xr
+
from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
@@ -251,8 +252,8 @@ class TestMERSI2L1BReader(unittest.TestCase):
def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
- from satpy.readers.mersi2_l1b import MERSI2L1B
from satpy._config import config_search_paths
+ from satpy.readers.mersi2_l1b import MERSI2L1B
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(MERSI2L1B, '__bases__', (FakeHDF5FileHandler2,))
@@ -265,9 +266,9 @@ def tearDown(self):
def test_fy3d_all_resolutions(self):
"""Test loading data when all resolutions are available."""
- from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
from satpy.dataset.data_dict import get_key
+ from satpy.readers import load_reader
+ from satpy.tests.utils import make_dataid
filenames = [
'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF',
'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF',
@@ -329,8 +330,8 @@ def test_fy3d_all_resolutions(self):
def test_fy3d_counts_calib(self):
"""Test loading data at counts calibration."""
- from satpy.tests.utils import make_dataid
from satpy.readers import load_reader
+ from satpy.tests.utils import make_dataid
filenames = [
'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF',
'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF',
@@ -385,8 +386,8 @@ def test_fy3d_counts_calib(self):
def test_fy3d_rad_calib(self):
"""Test loading data at radiance calibration."""
- from satpy.tests.utils import make_dataid
from satpy.readers import load_reader
+ from satpy.tests.utils import make_dataid
filenames = [
'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF',
'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF',
@@ -423,9 +424,9 @@ def test_fy3d_rad_calib(self):
def test_fy3d_1km_resolutions(self):
"""Test loading data when only 1km resolutions are available."""
- from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
from satpy.dataset.data_dict import get_key
+ from satpy.readers import load_reader
+ from satpy.tests.utils import make_dataid
filenames = [
'tf2019071182739.FY3D-X_MERSI_1000M_L1B.HDF',
'tf2019071182739.FY3D-X_MERSI_GEO1K_L1B.HDF',
@@ -485,9 +486,9 @@ def test_fy3d_1km_resolutions(self):
def test_fy3d_250_resolutions(self):
"""Test loading data when only 250m resolutions are available."""
- from satpy.tests.utils import make_dataid
- from satpy.readers import load_reader
from satpy.dataset.data_dict import get_key
+ from satpy.readers import load_reader
+ from satpy.tests.utils import make_dataid
filenames = [
'tf2019071182739.FY3D-X_MERSI_0250M_L1B.HDF',
'tf2019071182739.FY3D-X_MERSI_GEOQK_L1B.HDF',
diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py
index d22f856151..cafadf9e77 100644
--- a/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py
+++ b/satpy/tests/reader_tests/test_mimic_TPW2_lowres.py
@@ -18,15 +18,17 @@
# Satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.tropomi_l2 module."""
+import itertools
import os
import unittest
-import itertools
-from unittest import mock
from datetime import datetime
+from unittest import mock
+
import numpy as np
-from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
import xarray as xr
+from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
+
DEFAULT_FILE_DTYPE = np.float32
DEFAULT_FILE_SHAPE = (721, 1440)
DEFAULT_DATE = datetime(2019, 6, 19, 13, 0)
diff --git a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py
index e8733fa5a9..544c805e70 100644
--- a/satpy/tests/reader_tests/test_mimic_TPW2_nc.py
+++ b/satpy/tests/reader_tests/test_mimic_TPW2_nc.py
@@ -20,12 +20,13 @@
import os
import unittest
-from unittest import mock
from datetime import datetime
+from unittest import mock
+
import numpy as np
-from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
import xarray as xr
+from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
DEFAULT_FILE_DTYPE = np.float32
DEFAULT_FILE_SHAPE = (9001, 18000)
diff --git a/satpy/tests/reader_tests/test_mirs.py b/satpy/tests/reader_tests/test_mirs.py
index df445ab4e7..b726a519e5 100644
--- a/satpy/tests/reader_tests/test_mirs.py
+++ b/satpy/tests/reader_tests/test_mirs.py
@@ -19,10 +19,11 @@
"""Module for testing the satpy.readers.tropomi_l2 module."""
import os
-from unittest import mock
-import pytest
from datetime import datetime
+from unittest import mock
+
import numpy as np
+import pytest
import xarray as xr
METOP_FILE = "IMG_SX.M2.D17037.S1601.E1607.B0000001.WE.HR.ORB.nc"
@@ -32,7 +33,7 @@
EXAMPLE_FILES = [METOP_FILE, NPP_MIRS_L2_SWATH, OTHER_MIRS_L2_SWATH]
-N_CHANNEL = 3
+N_CHANNEL = 22
N_FOV = 96
N_SCANLINE = 100
DEFAULT_FILE_DTYPE = np.float64
@@ -42,16 +43,20 @@
dtype=DEFAULT_FILE_DTYPE)
DEFAULT_LON = np.linspace(127.6879, 144.5284, N_SCANLINE * N_FOV,
dtype=DEFAULT_FILE_DTYPE)
-FREQ = xr.DataArray([88, 88, 22], dims='Channel',
+FREQ = xr.DataArray([23.8, 31.4, 50.3, 51.76, 52.8, 53.596, 54.4, 54.94, 55.5,
+ 57.29, 57.29, 57.29, 57.29, 57.29, 57.29, 88.2, 165.5,
+ 183.31, 183.31, 183.31, 183.31, 183.31][:N_CHANNEL],
+ dims='Channel',
attrs={'description': "Central Frequencies (GHz)"})
-POLO = xr.DataArray([2, 2, 3], dims='Channel',
+POLO = xr.DataArray([2, 2, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3,
+ 3, 3, 3][:N_CHANNEL], dims='Channel',
attrs={'description': "Polarizations"})
DS_IDS = ['RR', 'longitude', 'latitude']
-TEST_VARS = ['btemp_88v1', 'btemp_88v2',
- 'btemp_22h', 'RR', 'Sfc_type']
-DEFAULT_UNITS = {'btemp_88v1': 'K', 'btemp_88v2': 'K',
- 'btemp_22h': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"}
+TEST_VARS = ['btemp_88v', 'btemp_165h',
+ 'btemp_23v', 'RR', 'Sfc_type']
+DEFAULT_UNITS = {'btemp_88v': 'K', 'btemp_165h': 'K',
+ 'btemp_23v': 'K', 'RR': 'mm/hr', 'Sfc_type': "1"}
PLATFORM = {"M2": "metop-a", "NPP": "npp", "GPM": "gpm"}
SENSOR = {"m2": "amsu-mhs", "npp": "atms", "gpm": "GPI"}
@@ -62,18 +67,46 @@
def fake_coeff_from_fn(fn):
"""Create Fake Coefficients."""
ameans = np.random.uniform(261, 267, N_CHANNEL)
- all_nchx = np.linspace(2, 3, N_CHANNEL, dtype=np.int32)
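+ # Per-channel lists of the (1-based) channel numbers used for each channel's limb correction.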
+ locations = [
+ [1, 2],
+ [1, 2],
+ [3, 4, 5],
+ [3, 4, 5],
+ [4, 5, 6],
+ [5, 6, 7],
+ [6, 7, 8],
+ [7, 8],
+ [9, 10, 11],
+ [10, 11],
+ [10, 11, 12],
+ [11, 12, 13],
+ [12, 13],
+ [12, 13, 14],
+ [14, 15],
+ [1, 16],
+ [17, 18],
+ [18, 19],
+ [18, 19, 20],
+ [19, 20, 21],
+ [20, 21, 22],
+ [21, 22],
+ ]
+ all_nchx = [len(loc) for loc in locations]
coeff_str = []
- for idx in range(1, N_CHANNEL):
+ for idx in range(1, N_CHANNEL + 1):
nx = idx - 1
coeff_str.append('\n')
next_line = ' {} {} {}\n'.format(idx, all_nchx[nx], ameans[nx])
coeff_str.append(next_line)
+ next_line = ' {}\n'.format(" ".join([str(x) for x in locations[idx - 1]]))
+ coeff_str.append(next_line)
for fov in range(1, N_FOV+1):
random_coeff = np.random.rand(all_nchx[nx])
+ random_coeff = np.ones(all_nchx[nx])
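+ # Override the random coefficients with deterministic values for reproducible results.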
str_coeff = ' '.join([str(x) for x in random_coeff])
random_means = np.random.uniform(261, 267, all_nchx[nx])
+ random_means = np.zeros(all_nchx[nx])
str_means = ' '.join([str(x) for x in random_means])
error_val = np.random.uniform(0, 4)
coeffs_line = ' {:>2} {:>2} {} {} {}\n'.format(idx, fov,
@@ -259,7 +292,7 @@ def _check_valid_range(data_arr, test_valid_range):
@staticmethod
def _check_fill_value(data_arr, test_fill_value):
assert '_FillValue' not in data_arr.attrs
- assert test_fill_value not in data_arr.data
+ assert not (data_arr.data == test_fill_value).any()
@staticmethod
def _check_attrs(data_arr, platform_name):
@@ -293,10 +326,11 @@ def test_basic_load(self, filenames, loadable_ids,
fd, mock.patch('satpy.readers.mirs.retrieve'):
fd.side_effect = fake_coeff_from_fn
loaded_data_arrs = r.load(loadable_ids)
- assert loaded_data_arrs
+ assert len(loaded_data_arrs) == len(loadable_ids)
test_data = fake_open_dataset(filenames[0])
for _data_id, data_arr in loaded_data_arrs.items():
+ data_arr = data_arr.compute()
var_name = data_arr.attrs["name"]
if var_name not in ['latitude', 'longitude']:
self._check_area(data_arr)
diff --git a/satpy/tests/reader_tests/test_modis_l1b.py b/satpy/tests/reader_tests/test_modis_l1b.py
new file mode 100644
index 0000000000..c91c68336a
--- /dev/null
+++ b/satpy/tests/reader_tests/test_modis_l1b.py
@@ -0,0 +1,190 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Unit tests for MODIS L1b HDF reader."""
+
+from __future__ import annotations
+
+import dask
+import numpy as np
+import pytest
+from pytest_lazyfixture import lazy_fixture
+
+from satpy import Scene, available_readers
+
+from ..utils import CustomScheduler, make_dataid
+from ._modis_fixtures import (
+ AVAILABLE_1KM_PRODUCT_NAMES,
+ AVAILABLE_HKM_PRODUCT_NAMES,
+ AVAILABLE_QKM_PRODUCT_NAMES,
+ _shape_for_resolution,
+)
+
+
+def _check_shared_metadata(data_arr):
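+ """Check the metadata attributes shared by all modis_l1b datasets."""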
+ assert data_arr.attrs["sensor"] == "modis"
+ assert data_arr.attrs["platform_name"] == "EOS-Terra"
+ assert "rows_per_scan" in data_arr.attrs
+ assert isinstance(data_arr.attrs["rows_per_scan"], int)
+ assert data_arr.attrs['reader'] == 'modis_l1b'
+
+
+def _load_and_check_geolocation(scene, resolution, exp_res, exp_shape, has_res,
+ check_callback=_check_shared_metadata):
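+ """Load lon/lat at the given resolution and check shape, value sign, and metadata."""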
+ scene.load(["longitude", "latitude"], resolution=resolution)
+ lon_id = make_dataid(name="longitude", resolution=exp_res)
+ lat_id = make_dataid(name="latitude", resolution=exp_res)
+ if has_res:
+ lon_arr = scene[lon_id]
+ lat_arr = scene[lat_id]
+ assert lon_arr.shape == exp_shape
+ assert lat_arr.shape == exp_shape
+ # compute lon/lat at the same time to avoid wasted computation
+ lon_vals, lat_vals = dask.compute(lon_arr, lat_arr)
+ np.testing.assert_array_less(lon_vals, 0)
+ np.testing.assert_array_less(0, lat_vals)
+ check_callback(lon_arr)
+ check_callback(lat_arr)
+ else:
+ pytest.raises(KeyError, scene.__getitem__, lon_id)
+ pytest.raises(KeyError, scene.__getitem__, lat_id)
+
+
+class TestModisL1b:
+ """Test MODIS L1b reader."""
+
+ def test_available_reader(self):
+ """Test that MODIS L1b reader is available."""
+ assert 'modis_l1b' in available_readers()
+
+ @pytest.mark.parametrize(
+ ('input_files', 'expected_names', 'expected_data_res', 'expected_geo_res'),
+ [
+ [lazy_fixture('modis_l1b_nasa_mod021km_file'),
+ AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES,
+ [1000], [5000, 1000]],
+ [lazy_fixture('modis_l1b_imapp_1000m_file'),
+ AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES,
+ [1000], [5000, 1000]],
+ [lazy_fixture('modis_l1b_nasa_mod02hkm_file'),
+ AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]],
+ [lazy_fixture('modis_l1b_nasa_mod02qkm_file'),
+ AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]],
+ ]
+ )
+ def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res):
+ """Test that datasets are available."""
+ scene = Scene(reader='modis_l1b', filenames=input_files)
+ available_datasets = scene.available_dataset_names()
+ assert len(available_datasets) > 0
+ assert 'longitude' in available_datasets
+ assert 'latitude' in available_datasets
+ for chan_name in expected_names:
+ assert chan_name in available_datasets
+
+ available_data_ids = scene.available_dataset_ids()
+ available_datas = {x: [] for x in expected_data_res}
+ available_geos = {x: [] for x in expected_geo_res}
+ # Make sure that every resolution from the reader is what we expect
+ for data_id in available_data_ids:
+ res = data_id['resolution']
+ if data_id['name'] in ['longitude', 'latitude']:
+ assert res in expected_geo_res
+ available_geos[res].append(data_id)
+ else:
+ assert res in expected_data_res
+ available_datas[res].append(data_id)
+
+ # Make sure that every resolution we expect has at least one dataset
+ for exp_res, avail_id in available_datas.items():
+ assert avail_id, f"Missing datasets for data resolution {exp_res}"
+ for exp_res, avail_id in available_geos.items():
+ assert avail_id, f"Missing geo datasets for geo resolution {exp_res}"
+
+ @pytest.mark.parametrize(
+ ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'),
+ [
+ [lazy_fixture('modis_l1b_nasa_mod021km_file'),
+ True, False, False, 1000],
+ [lazy_fixture('modis_l1b_imapp_1000m_file'),
+ True, False, False, 1000],
+ [lazy_fixture('modis_l1b_nasa_mod02hkm_file'),
+ False, True, True, 250],
+ [lazy_fixture('modis_l1b_nasa_mod02qkm_file'),
+ False, True, True, 250],
+ [lazy_fixture('modis_l1b_nasa_1km_mod03_files'),
+ True, True, True, 250],
+ ]
+ )
+ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res):
+ """Test that longitude and latitude datasets are loaded correctly."""
+ scene = Scene(reader='modis_l1b', filenames=input_files)
+ shape_5km = _shape_for_resolution(5000)
+ shape_500m = _shape_for_resolution(500)
+ shape_250m = _shape_for_resolution(250)
+ default_shape = _shape_for_resolution(default_res)
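+        # CustomScheduler fails the test if dask computes more often than
+        # expected: one compute for the default load plus one per extra
+        # available resolution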
+ with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)):
+ _load_and_check_geolocation(scene, "*", default_res, default_shape, True)
+ _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km)
+ _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500)
+ _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250)
+
+ def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file):
+ """Test loading satellite zenith angle band."""
+ scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file)
+ dataset_name = 'satellite_zenith_angle'
+ scene.load([dataset_name])
+ dataset = scene[dataset_name]
+ assert dataset.shape == _shape_for_resolution(1000)
+ assert dataset.attrs['resolution'] == 1000
+ _check_shared_metadata(dataset)
+
+ def test_load_vis(self, modis_l1b_nasa_mod021km_file):
+ """Test loading visible band."""
+ scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file)
+ dataset_name = '1'
+ scene.load([dataset_name])
+ dataset = scene[dataset_name]
+ assert dataset[0, 0] == 300.0
+ assert dataset.shape == _shape_for_resolution(1000)
+ assert dataset.attrs['resolution'] == 1000
+ _check_shared_metadata(dataset)
+
+ @pytest.mark.parametrize("mask_saturated", [False, True])
+ def test_load_vis_saturation(self, mask_saturated, modis_l1b_nasa_mod021km_file):
+ """Test loading visible band."""
+ scene = Scene(reader='modis_l1b', filenames=modis_l1b_nasa_mod021km_file,
+ reader_kwargs={"mask_saturated": mask_saturated})
+ dataset_name = '2'
+ scene.load([dataset_name])
+ dataset = scene[dataset_name]
+ assert dataset.shape == _shape_for_resolution(1000)
+ assert dataset.attrs['resolution'] == 1000
+ _check_shared_metadata(dataset)
+
+ # check saturation fill values
+ data = dataset.values
+        assert data[0, 0] == 300.0
+ assert np.isnan(data[-1, -1]) # normal fill value
+ if mask_saturated:
+ assert np.isnan(data[-1, -2]) # saturation
+ assert np.isnan(data[-1, -3]) # can't aggregate
+ else:
+            # without masking, the fill values pass through calibration
+            # unchanged: factor/offset are 1/0 and albedos are converted to
+            # percent, so the raw values end up >= 32767 * 100
+ assert data[-1, -2] >= 32767 * 100.0 # saturation
+ assert data[-1, -3] >= 32767 * 100.0 # can't aggregate
diff --git a/satpy/tests/reader_tests/test_modis_l2.py b/satpy/tests/reader_tests/test_modis_l2.py
index d3064f7f92..848bd1bf05 100644
--- a/satpy/tests/reader_tests/test_modis_l2.py
+++ b/satpy/tests/reader_tests/test_modis_l2.py
@@ -17,205 +17,157 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for MODIS L2 HDF reader."""
-import os
-import unittest
+from __future__ import annotations
+import dask
+import dask.array as da
import numpy as np
+import pytest
+from pytest_lazyfixture import lazy_fixture
-from pyhdf.SD import SD, SDC
+from satpy import Scene, available_readers
-from satpy import available_readers, Scene
+from ..utils import CustomScheduler, make_dataid
+from ._modis_fixtures import _shape_for_resolution
-# Mock MODIS HDF4 file
-SCAN_WIDTH = 406
-SCAN_LEN = 270
-SCALE_FACTOR = 1
-TEST_LAT = np.repeat(np.linspace(35., 45., SCAN_WIDTH)[:, None], SCAN_LEN, 1)
-TEST_LAT *= np.linspace(0.9, 1.1, SCAN_LEN)
-TEST_LON = np.repeat(np.linspace(-45., -35., SCAN_LEN)[None, :], SCAN_WIDTH, 0)
-TEST_LON *= np.linspace(0.9, 1.1, SCAN_WIDTH)[:, None]
-TEST_SATZ = (np.repeat(abs(np.linspace(-65.2, 65.4, SCAN_LEN))[None, :], SCAN_WIDTH, 0) * 100).astype(np.int16)
-TEST_DATA = {
- 'Latitude': {'data': TEST_LAT.astype(np.float32),
- 'type': SDC.FLOAT32,
- 'fill_value': -999,
- 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}},
- 'Longitude': {'data': TEST_LON.astype(np.float32),
- 'type': SDC.FLOAT32,
- 'fill_value': -999,
- 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35']}},
- 'Sensor_Zenith': {'data': TEST_SATZ,
- 'type': SDC.INT32,
- 'fill_value': -32767,
- 'attrs': {'dim_labels': ['Cell_Along_Swath_5km:mod35', 'Cell_Across_Swath_5km:mod35'],
- 'scale_factor': 0.01}},
- 'Cloud_Mask': {'data': np.zeros((6, 5*SCAN_WIDTH, 5*SCAN_LEN+4), dtype=np.int8),
- 'type': SDC.INT8,
- 'fill_value': 0,
- 'attrs': {'dim_labels': ['Byte_Segment:mod35',
- 'Cell_Along_Swath_1km:mod35',
- 'Cell_Across_Swath_1km:mod35']}},
- 'Quality_Assurance': {'data': np.ones((5*SCAN_WIDTH, 5*SCAN_LEN+4, 10), dtype=np.int8),
- 'type': SDC.INT8,
- 'fill_value': 0,
- 'attrs': {'dim_labels': ['Cell_Along_Swath_1km:mod35',
- 'Cell_Across_Swath_1km:mod35',
- 'QA_Dimension:mod35']}}
-}
+def _check_shared_metadata(data_arr, expect_area=False):
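+    """Check metadata shared by all datasets from the modis_l2 reader."""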
+ assert data_arr.attrs["sensor"] == "modis"
+ assert data_arr.attrs["platform_name"] == "EOS-Terra"
+ assert "rows_per_scan" in data_arr.attrs
+ assert isinstance(data_arr.attrs["rows_per_scan"], int)
+ assert data_arr.attrs['reader'] == 'modis_l2'
+ if expect_area:
+ assert data_arr.attrs.get('area') is not None
+ else:
+ assert 'area' not in data_arr.attrs
-def generate_file_name():
- """Generate a file name that follows MODIS 35 L2 convention in a temporary directory."""
- import tempfile
- from datetime import datetime
- creation_time = datetime.now()
- processing_time = datetime.now()
- file_name = 'MOD35_L2.A{}.{}.061.{}.hdf'.format(
- creation_time.strftime("%Y%j"),
- creation_time.strftime("%H%M"),
- processing_time.strftime("%Y%j%H%M%S")
- )
-
- base_dir = tempfile.mkdtemp()
- file_name = os.path.join(base_dir, file_name)
- return base_dir, file_name
-
-
-def create_test_data():
- """Create a fake MODIS 35 L2 HDF4 file with headers."""
- from datetime import datetime, timedelta
-
- base_dir, file_name = generate_file_name()
- h = SD(file_name, SDC.WRITE | SDC.CREATE)
- # Set hdf file attributes
- beginning_date = datetime.now()
- ending_date = beginning_date + timedelta(minutes=5)
- core_metadata_header = "GROUP = INVENTORYMETADATA\nGROUPTYPE = MASTERGROUP\n\n" \
- "GROUP = RANGEDATETIME\n\nOBJECT = RANGEBEGINNINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
- "END_OBJECT = RANGEBEGINNINGDATE\n\nOBJECT = RANGEBEGINNINGTIME\n"\
- "NUM_VAL = 1\nVALUE = \"{}\"\n"\
- "END_OBJECT = RANGEBEGINNINGTIME\n\nOBJECT = RANGEENDINGDATE\nNUM_VAL = 1\nVALUE = \"{}\"\n"\
- "END_OBJECT = RANGEENDINGDATE\n\nOBJECT = RANGEENDINGTIME\nNUM_VAL = 1\nVALUE = \"{}\"\n" \
- "END_OBJECT = RANGEENDINGTIME\nEND_GROUP = RANGEDATETIME".format(
- beginning_date.strftime("%Y-%m-%d"),
- beginning_date.strftime("%H:%M:%S.%f"),
- ending_date.strftime("%Y-%m-%d"),
- ending_date.strftime("%H:%M:%S.%f")
- )
- struct_metadata_header = "GROUP=SwathStructure\n"\
- "GROUP=SWATH_1\n"\
- "GROUP=DimensionMap\n"\
- "OBJECT=DimensionMap_2\n"\
- "GeoDimension=\"Cell_Along_Swath_5km\"\n"\
- "END_OBJECT=DimensionMap_2\n"\
- "END_GROUP=DimensionMap\n"\
- "END_GROUP=SWATH_1\n"\
- "END_GROUP=SwathStructure\nEND"
- archive_metadata_header = "GROUP = ARCHIVEDMETADATA\nEND_GROUP = ARCHIVEDMETADATA\nEND"
- setattr(h, 'CoreMetadata.0', core_metadata_header) # noqa
- setattr(h, 'StructMetadata.0', struct_metadata_header) # noqa
- setattr(h, 'ArchiveMetadata.0', archive_metadata_header) # noqa
-
- # Fill datasets
- for dataset in TEST_DATA:
- v = h.create(dataset, TEST_DATA[dataset]['type'], TEST_DATA[dataset]['data'].shape)
- v[:] = TEST_DATA[dataset]['data']
- dim_count = 0
- for dimension_name in TEST_DATA[dataset]['attrs']['dim_labels']:
- v.dim(dim_count).setname(dimension_name)
- dim_count += 1
- v.setfillvalue(TEST_DATA[dataset]['fill_value'])
- v.scale_factor = TEST_DATA[dataset]['attrs'].get('scale_factor', SCALE_FACTOR)
- h.end()
- return base_dir, file_name
-
-
-class TestModisL2(unittest.TestCase):
+class TestModisL2:
"""Test MODIS L2 reader."""
- def setUp(self):
- """Create fake HDF4 MODIS file."""
- self.base_dir, self.file_name = create_test_data()
-
- def tearDown(self):
- """Remove the temporary directory created for the test."""
- try:
- import shutil
- shutil.rmtree(self.base_dir, ignore_errors=True)
- except OSError:
- pass
-
def test_available_reader(self):
"""Test that MODIS L2 reader is available."""
- self.assertIn('modis_l2', available_readers())
+ assert 'modis_l2' in available_readers()
- def test_scene_available_datasets(self):
+ def test_scene_available_datasets(self, modis_l2_nasa_mod35_file):
"""Test that datasets are available."""
- scene = Scene(reader='modis_l2', filenames=[self.file_name])
+ scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file)
available_datasets = scene.all_dataset_names()
- self.assertTrue(len(available_datasets) > 0)
- self.assertIn('cloud_mask', available_datasets)
- self.assertIn('latitude', available_datasets)
- self.assertIn('longitude', available_datasets)
-
- def test_load_longitude_latitude(self):
+ assert len(available_datasets) > 0
+ assert 'cloud_mask' in available_datasets
+ assert 'latitude' in available_datasets
+ assert 'longitude' in available_datasets
+
+ @pytest.mark.parametrize(
+ ('input_files', 'has_5km', 'has_500', 'has_250', 'default_res'),
+ [
+ [lazy_fixture('modis_l2_nasa_mod35_file'),
+ True, False, False, 1000],
+ ]
+ )
+ def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res):
"""Test that longitude and latitude datasets are loaded correctly."""
- from satpy.tests.utils import make_dataid
-
- def test_func(dname, x, y):
- if dname == 'longitude':
- # assert less
- np.testing.assert_array_less(x, y)
- else:
- # assert greater
- # np.testing.assert_equal(x > y, True)
- np.testing.assert_array_less(y, x)
-
- scene = Scene(reader='modis_l2', filenames=[self.file_name])
- for dataset_name in ['longitude', 'latitude']:
- # Default resolution should be the interpolated 1km
- scene.load([dataset_name])
- longitude_1km_id = make_dataid(name=dataset_name, resolution=1000)
- longitude_1km = scene[longitude_1km_id]
- self.assertEqual(longitude_1km.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
- test_func(dataset_name, longitude_1km.values, 0)
- # Specify original 5km scale
- scene.load([dataset_name], resolution=5000)
- longitude_5km_id = make_dataid(name=dataset_name, resolution=5000)
- longitude_5km = scene[longitude_5km_id]
- self.assertEqual(longitude_5km.shape, TEST_DATA[dataset_name.capitalize()]['data'].shape)
- test_func(dataset_name, longitude_5km.values, 0)
-
- def test_load_quality_assurance(self):
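+        # reuse the geolocation checking helper shared with the L1b tests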
+ from .test_modis_l1b import _load_and_check_geolocation
+ scene = Scene(reader='modis_l2', filenames=input_files)
+ shape_5km = _shape_for_resolution(5000)
+ shape_500m = _shape_for_resolution(500)
+ shape_250m = _shape_for_resolution(250)
+ default_shape = _shape_for_resolution(default_res)
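+        # same compute-count limit as the MODIS L1b geolocation test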
+ with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)):
+ _load_and_check_geolocation(scene, "*", default_res, default_shape, True,
+ check_callback=_check_shared_metadata)
+ _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km,
+ check_callback=_check_shared_metadata)
+ _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500,
+ check_callback=_check_shared_metadata)
+ _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250,
+ check_callback=_check_shared_metadata)
+
+ def test_load_quality_assurance(self, modis_l2_nasa_mod35_file):
"""Test loading quality assurance."""
- from satpy.tests.utils import make_dataid
- scene = Scene(reader='modis_l2', filenames=[self.file_name])
+ scene = Scene(reader='modis_l2', filenames=modis_l2_nasa_mod35_file)
dataset_name = 'quality_assurance'
scene.load([dataset_name])
quality_assurance_id = make_dataid(name=dataset_name, resolution=1000)
- self.assertIn(quality_assurance_id, scene)
+ assert quality_assurance_id in scene
quality_assurance = scene[quality_assurance_id]
- self.assertEqual(quality_assurance.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-
- def test_load_1000m_cloud_mask_dataset(self):
- """Test loading 1000m cloud mask."""
- from satpy.tests.utils import make_dataid
- scene = Scene(reader='modis_l2', filenames=[self.file_name])
- dataset_name = 'cloud_mask'
- scene.load([dataset_name], resolution=1000)
- cloud_mask_id = make_dataid(name=dataset_name, resolution=1000)
- self.assertIn(cloud_mask_id, scene)
- cloud_mask = scene[cloud_mask_id]
- self.assertEqual(cloud_mask.shape, (5*SCAN_WIDTH, 5*SCAN_LEN+4))
-
- def test_load_250m_cloud_mask_dataset(self):
+ assert quality_assurance.shape == _shape_for_resolution(1000)
+ _check_shared_metadata(quality_assurance, expect_area=True)
+
+ @pytest.mark.parametrize(
+ ('input_files', 'loadables', 'request_resolution', 'exp_resolution', 'exp_area'),
+ [
+ [lazy_fixture('modis_l2_nasa_mod35_mod03_files'),
+ ["cloud_mask"],
+ 1000, 1000, True],
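+            # request_resolution=None loads at the reader's default resolution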
+ [lazy_fixture('modis_l2_imapp_mask_byte1_geo_files'),
+ ["cloud_mask", "land_sea_mask", "snow_ice_mask"],
+ None, 1000, True],
+ ]
+ )
+ def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area):
+ """Test loading category products."""
+ scene = Scene(reader='modis_l2', filenames=input_files)
+ kwargs = {"resolution": request_resolution} if request_resolution is not None else {}
+ scene.load(loadables, **kwargs)
+ for ds_name in loadables:
+ cat_id = make_dataid(name=ds_name, resolution=exp_resolution)
+ assert cat_id in scene
+ cat_data_arr = scene[cat_id]
+ assert isinstance(cat_data_arr.data, da.Array)
+ cat_data_arr = cat_data_arr.compute()
+ assert cat_data_arr.shape == _shape_for_resolution(exp_resolution)
+ assert cat_data_arr.values[0, 0] == 0.0
+ assert cat_data_arr.attrs.get('resolution') == exp_resolution
+ # mask variables should be integers
+ assert np.issubdtype(cat_data_arr.dtype, np.integer)
+ assert cat_data_arr.attrs.get('_FillValue') is not None
+ _check_shared_metadata(cat_data_arr, expect_area=exp_area)
+
+ @pytest.mark.parametrize(
+ ('input_files', 'exp_area'),
+ [
+ [lazy_fixture('modis_l2_nasa_mod35_file'), False],
+ [lazy_fixture('modis_l2_nasa_mod35_mod03_files'), True],
+ ]
+ )
+ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area):
"""Test loading 250m cloud mask."""
- from satpy.tests.utils import make_dataid
- scene = Scene(reader='modis_l2', filenames=[self.file_name])
+ scene = Scene(reader='modis_l2', filenames=input_files)
dataset_name = 'cloud_mask'
scene.load([dataset_name], resolution=250)
cloud_mask_id = make_dataid(name=dataset_name, resolution=250)
- self.assertIn(cloud_mask_id, scene)
+ assert cloud_mask_id in scene
cloud_mask = scene[cloud_mask_id]
- self.assertEqual(cloud_mask.shape, (4*5*SCAN_WIDTH, 4*(5*SCAN_LEN+4)))
+ assert isinstance(cloud_mask.data, da.Array)
+ cloud_mask = cloud_mask.compute()
+ assert cloud_mask.shape == _shape_for_resolution(250)
+ assert cloud_mask.values[0, 0] == 0.0
+ # mask variables should be integers
+ assert np.issubdtype(cloud_mask.dtype, np.integer)
+ assert cloud_mask.attrs.get('_FillValue') is not None
+ _check_shared_metadata(cloud_mask, expect_area=exp_area)
+
+ @pytest.mark.parametrize(
+ ('input_files', 'loadables', 'exp_resolution', 'exp_area', 'exp_value'),
+ [
+ [lazy_fixture('modis_l2_nasa_mod06_file'), ["surface_pressure"], 5000, True, 4.0],
+ # snow mask is considered a category product, factor/offset ignored
+ [lazy_fixture('modis_l2_imapp_snowmask_file'), ["snow_mask"], 1000, False, 1.0],
+ [lazy_fixture('modis_l2_imapp_snowmask_geo_files'), ["snow_mask"], 1000, True, 1.0],
+ ]
+ )
+ def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value):
+ """Load and check an L2 variable."""
+ scene = Scene(reader='modis_l2', filenames=input_files)
+ scene.load(loadables)
+ for ds_name in loadables:
+ assert ds_name in scene
+ data_arr = scene[ds_name]
+ assert isinstance(data_arr.data, da.Array)
+ data_arr = data_arr.compute()
+ assert data_arr.values[0, 0] == exp_value
+ assert data_arr.shape == _shape_for_resolution(exp_resolution)
+ assert data_arr.attrs.get('resolution') == exp_resolution
+ _check_shared_metadata(data_arr, expect_area=exp_area)
diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py
new file mode 100644
index 0000000000..a085c3aa42
--- /dev/null
+++ b/satpy/tests/reader_tests/test_msi_safe.py
@@ -0,0 +1,994 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.msi_safe module."""
+import unittest
+import unittest.mock as mock
+from io import BytesIO, StringIO
+
+import numpy as np
+import pytest
+import xarray as xr
+
+from satpy.tests.utils import make_dataid
+
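+# Tile metadata (MTD_TL.xml) with tile geocoding and sun/viewing angle grids.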
+mtd_tile_xml = b"""
+
+
+
+ S2B_OPER_MSI_L1C_TL_VGS1_20201001T183541_A018656_T16SEB_N02.09
+ S2B_OPER_MSI_L1C_DS_VGS1_20201001T183541_S20201001T162735_N02.09
+ NOMINAL
+ 2020-10-01T16:34:23.153611Z
+
+ VGS1
+ 2020-10-01T18:55:55.59803Z
+
+
+
+
+
+ WGS84 / UTM zone 16N
+ EPSG:32616
+
+ 10980
+ 10980
+
+
+ 5490
+ 5490
+
+
+ 1830
+ 1830
+
+
+ 499980
+ 3700020
+ 10
+ -10
+
+
+ 499980
+ 3700020
+ 20
+ -20
+
+
+ 499980
+ 3700020
+ 60
+ -60
+
+
+
+
+
+ 5000
+ 5000
+
+ 39.8824 39.8636 39.8448 39.8261 39.8074 39.7888 39.7702 39.7516 39.7331 39.7145 39.6961 39.6776 39.6592 39.6408 39.6225 39.6042 39.5859 39.5677 39.5495 39.5313 39.5132 39.4951 39.477
+ 39.8404 39.8216 39.8029 39.7841 39.7655 39.7468 39.7282 39.7096 39.691 39.6725 39.654 39.6355 39.6171 39.5987 39.5804 39.5621 39.5438 39.5255 39.5073 39.4891 39.471 39.4529 39.4348
+ 39.7985 39.7797 39.7609 39.7422 39.7235 39.7048 39.6862 39.6675 39.649 39.6304 39.6119 39.5935 39.575 39.5566 39.5383 39.5199 39.5016 39.4834 39.4651 39.4469 39.4288 39.4107 39.3926
+ 39.7566 39.7377 39.719 39.7002 39.6815 39.6628 39.6441 39.6255 39.6069 39.5884 39.5699 39.5514 39.533 39.5145 39.4962 39.4778 39.4595 39.4412 39.423 39.4048 39.3866 39.3685 39.3504
+ 39.7146 39.6958 39.677 39.6582 39.6395 39.6208 39.6021 39.5835 39.5649 39.5464 39.5278 39.5093 39.4909 39.4724 39.4541 39.4357 39.4174 39.3991 39.3808 39.3626 39.3444 39.3263 39.3082
+ 39.6727 39.6539 39.635 39.6163 39.5975 39.5788 39.5601 39.5415 39.5229 39.5043 39.4858 39.4673 39.4488 39.4304 39.412 39.3936 39.3752 39.3569 39.3387 39.3204 39.3023 39.2841 39.266
+ 39.6308 39.6119 39.5931 39.5743 39.5556 39.5368 39.5181 39.4995 39.4809 39.4623 39.4437 39.4252 39.4067 39.3883 39.3699 39.3515 39.3331 39.3148 39.2965 39.2783 39.2601 39.2419 39.2238
+ 39.5889 39.57 39.5512 39.5324 39.5136 39.4949 39.4762 39.4575 39.4389 39.4203 39.4017 39.3832 39.3647 39.3462 39.3278 39.3094 39.291 39.2727 39.2544 39.2361 39.2179 39.1997 39.1816
+ 39.547 39.5281 39.5092 39.4904 39.4716 39.4529 39.4342 39.4155 39.3968 39.3782 39.3596 39.3411 39.3226 39.3041 39.2857 39.2673 39.2489 39.2306 39.2123 39.194 39.1758 39.1576 39.1394
+ 39.5051 39.4862 39.4673 39.4485 39.4297 39.4109 39.3922 39.3735 39.3548 39.3362 39.3176 39.2991 39.2805 39.2621 39.2436 39.2252 39.2068 39.1884 39.1701 39.1518 39.1336 39.1154 39.0972
+ 39.4632 39.4442 39.4254 39.4065 39.3877 39.3689 39.3502 39.3315 39.3128 39.2942 39.2756 39.257 39.2385 39.22 39.2015 39.1831 39.1647 39.1463 39.128 39.1097 39.0914 39.0732 39.055
+ 39.4213 39.4023 39.3834 39.3646 39.3458 39.327 39.3082 39.2895 39.2708 39.2522 39.2336 39.215 39.1964 39.1779 39.1594 39.141 39.1226 39.1042 39.0859 39.0676 39.0493 39.0311 39.0129
+ 39.3794 39.3604 39.3415 39.3227 39.3038 39.285 39.2663 39.2475 39.2288 39.2102 39.1915 39.1729 39.1544 39.1359 39.1174 39.0989 39.0805 39.0621 39.0438 39.0254 39.0072 38.9889 38.9707
+ 39.3375 39.3185 39.2996 39.2807 39.2619 39.2431 39.2243 39.2056 39.1868 39.1682 39.1495 39.1309 39.1123 39.0938 39.0753 39.0568 39.0384 39.02 39.0016 38.9833 38.965 38.9468 38.9285
+ 39.2956 39.2766 39.2577 39.2388 39.22 39.2011 39.1823 39.1636 39.1449 39.1262 39.1075 39.0889 39.0703 39.0518 39.0332 39.0148 38.9963 38.9779 38.9595 38.9412 38.9229 38.9046 38.8864
+ 39.2537 39.2348 39.2158 39.1969 39.178 39.1592 39.1404 39.1216 39.1029 39.0842 39.0655 39.0469 39.0283 39.0097 38.9912 38.9727 38.9542 38.9358 38.9174 38.8991 38.8807 38.8625 38.8442
+ 39.2119 39.1929 39.1739 39.155 39.1361 39.1173 39.0984 39.0797 39.0609 39.0422 39.0235 39.0049 38.9862 38.9677 38.9491 38.9306 38.9122 38.8937 38.8753 38.8569 38.8386 38.8203 38.8021
+ 39.17 39.151 39.132 39.1131 39.0942 39.0753 39.0565 39.0377 39.0189 39.0002 38.9815 38.9628 38.9442 38.9256 38.9071 38.8886 38.8701 38.8516 38.8332 38.8148 38.7965 38.7782 38.7599
+ 39.1281 39.1091 39.0901 39.0712 39.0523 39.0334 39.0145 38.9957 38.977 38.9582 38.9395 38.9208 38.9022 38.8836 38.865 38.8465 38.828 38.8095 38.7911 38.7727 38.7544 38.736 38.7178
+ 39.0863 39.0672 39.0482 39.0293 39.0104 38.9915 38.9726 38.9538 38.935 38.9162 38.8975 38.8788 38.8602 38.8416 38.823 38.8045 38.7859 38.7675 38.749 38.7306 38.7122 38.6939 38.6756
+ 39.0444 39.0254 39.0064 38.9874 38.9685 38.9496 38.9307 38.9118 38.893 38.8743 38.8555 38.8368 38.8182 38.7996 38.781 38.7624 38.7439 38.7254 38.7069 38.6885 38.6701 38.6518 38.6335
+ 39.0026 38.9835 38.9645 38.9455 38.9266 38.9076 38.8888 38.8699 38.8511 38.8323 38.8136 38.7949 38.7762 38.7575 38.7389 38.7204 38.7018 38.6833 38.6649 38.6464 38.628 38.6097 38.5913
+ 38.9607 38.9417 38.9226 38.9036 38.8847 38.8657 38.8468 38.828 38.8091 38.7903 38.7716 38.7529 38.7342 38.7155 38.6969 38.6783 38.6598 38.6413 38.6228 38.6043 38.5859 38.5676 38.5492
+
+
+
+ 5000
+ 5000
+
+ 154.971 155.049 155.126 155.204 155.282 155.359 155.437 155.515 155.593 155.671 155.749 155.827 155.905 155.983 156.061 156.14 156.218 156.296 156.375 156.453 156.532 156.61 156.689
+ 154.953 155.03 155.108 155.186 155.263 155.341 155.419 155.497 155.575 155.653 155.731 155.809 155.887 155.965 156.043 156.122 156.2 156.278 156.357 156.435 156.514 156.592 156.671
+ 154.934 155.012 155.09 155.167 155.245 155.323 155.401 155.478 155.556 155.634 155.712 155.79 155.869 155.947 156.025 156.103 156.182 156.26 156.338 156.417 156.495 156.574 156.653
+ 154.916 154.994 155.071 155.149 155.227 155.304 155.382 155.46 155.538 155.616 155.694 155.772 155.85 155.928 156.007 156.085 156.163 156.242 156.32 156.399 156.477 156.556 156.634
+ 154.897 154.975 155.053 155.13 155.208 155.286 155.364 155.442 155.52 155.598 155.676 155.754 155.832 155.91 155.988 156.067 156.145 156.223 156.302 156.38 156.459 156.538 156.616
+ 154.879 154.956 155.034 155.112 155.19 155.267 155.345 155.423 155.501 155.579 155.657 155.735 155.814 155.892 155.97 156.048 156.127 156.205 156.284 156.362 156.441 156.519 156.598
+ 154.86 154.938 155.015 155.093 155.171 155.249 155.327 155.405 155.483 155.561 155.639 155.717 155.795 155.873 155.952 156.03 156.108 156.187 156.265 156.344 156.422 156.501 156.58
+ 154.841 154.919 154.997 155.075 155.152 155.23 155.308 155.386 155.464 155.542 155.62 155.698 155.777 155.855 155.933 156.012 156.09 156.168 156.247 156.325 156.404 156.483 156.561
+ 154.823 154.9 154.978 155.056 155.134 155.212 155.289 155.367 155.445 155.524 155.602 155.68 155.758 155.836 155.915 155.993 156.071 156.15 156.228 156.307 156.386 156.464 156.543
+ 154.804 154.882 154.959 155.037 155.115 155.193 155.271 155.349 155.427 155.505 155.583 155.661 155.739 155.818 155.896 155.974 156.053 156.131 156.21 156.289 156.367 156.446 156.525
+ 154.785 154.863 154.941 155.018 155.096 155.174 155.252 155.33 155.408 155.486 155.564 155.643 155.721 155.799 155.878 155.956 156.034 156.113 156.191 156.27 156.349 156.427 156.506
+ 154.766 154.844 154.922 155 155.077 155.155 155.233 155.311 155.389 155.467 155.546 155.624 155.702 155.78 155.859 155.937 156.016 156.094 156.173 156.251 156.33 156.409 156.488
+ 154.747 154.825 154.903 154.981 155.059 155.136 155.214 155.292 155.371 155.449 155.527 155.605 155.683 155.762 155.84 155.919 155.997 156.076 156.154 156.233 156.312 156.39 156.469
+ 154.728 154.806 154.884 154.962 155.04 155.118 155.196 155.274 155.352 155.43 155.508 155.586 155.665 155.743 155.821 155.9 155.978 156.057 156.136 156.214 156.293 156.372 156.451
+ 154.709 154.787 154.865 154.943 155.021 155.099 155.177 155.255 155.333 155.411 155.489 155.568 155.646 155.724 155.803 155.881 155.96 156.038 156.117 156.196 156.274 156.353 156.432
+ 154.69 154.768 154.846 154.924 155.002 155.08 155.158 155.236 155.314 155.392 155.47 155.549 155.627 155.705 155.784 155.862 155.941 156.019 156.098 156.177 156.256 156.334 156.413
+ 154.671 154.749 154.827 154.905 154.983 155.061 155.139 155.217 155.295 155.373 155.451 155.53 155.608 155.686 155.765 155.843 155.922 156.001 156.079 156.158 156.237 156.316 156.394
+ 154.652 154.73 154.808 154.886 154.964 155.042 155.12 155.198 155.276 155.354 155.432 155.511 155.589 155.668 155.746 155.825 155.903 155.982 156.06 156.139 156.218 156.297 156.376
+ 154.633 154.711 154.789 154.866 154.944 155.022 155.101 155.179 155.257 155.335 155.413 155.492 155.57 155.649 155.727 155.806 155.884 155.963 156.042 156.12 156.199 156.278 156.357
+ 154.614 154.691 154.769 154.847 154.925 155.003 155.081 155.16 155.238 155.316 155.394 155.473 155.551 155.63 155.708 155.787 155.865 155.944 156.023 156.101 156.18 156.259 156.338
+ 154.594 154.672 154.75 154.828 154.906 154.984 155.062 155.14 155.219 155.297 155.375 155.454 155.532 155.61 155.689 155.768 155.846 155.925 156.004 156.082 156.161 156.24 156.319
+ 154.575 154.653 154.731 154.809 154.887 154.965 155.043 155.121 155.199 155.278 155.356 155.434 155.513 155.591 155.67 155.748 155.827 155.906 155.985 156.063 156.142 156.221 156.3
+ 154.556 154.633 154.711 154.789 154.867 154.945 155.024 155.102 155.18 155.258 155.337 155.415 155.494 155.572 155.651 155.729 155.808 155.887 155.965 156.044 156.123 156.202 156.281
+
+
+
+
+ 39.2158335161115
+ 155.62398389104
+
+
+
+ 5000
+ 5000
+
+ NaN 11.7128 11.3368 10.9601 10.5837 10.2053 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN 11.6285 11.2531 10.8763 10.4977 10.1207 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 11.9203 11.5439 11.1676 10.79 10.4135 10.036 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 11.8359 11.4595 11.0825 10.7054 10.3284 9.95143 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 11.751 11.3743 10.9977 10.6209 10.2437 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 11.6664 11.2901 10.9134 10.5362 10.1591 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 11.5818 11.2061 10.8293 10.4518 10.0747 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 11.4976 11.121 10.7439 10.3664 9.98937 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+ 5000
+ 5000
+
+ NaN 111.269 111.67 112.096 112.551 113.041 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN 111.354 111.759 112.192 112.657 113.152 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 111.053 111.44 111.852 112.292 112.762 113.266 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 111.136 111.529 111.946 112.392 112.869 113.381 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 111.219 111.618 112.042 112.494 112.978 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 111.305 111.708 112.138 112.597 113.089 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 111.391 111.799 112.235 112.702 113.201 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ 111.478 111.893 112.336 112.809 113.317 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN 9.82039 9.4373 9.05284 8.66805 8.28339 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN 9.73454 9.35159 8.96724 8.58182 8.19763 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 10.0315 9.64827 9.26401 8.87996 8.49572 8.11079 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 9.94572 9.56205 9.17796 8.79367 8.4095 8.02451 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 9.85977 9.47669 9.09189 8.70763 8.32282 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 9.77437 9.38968 9.00597 8.62183 8.23655 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 9.68751 9.30377 8.91958 8.53514 8.15057 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN 9.98449 9.60143 9.21746 8.83286 8.4486 8.06421 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 9.51494 9.13074 8.74664 8.3621 7.97741 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN 92.2969 91.9939 91.6606 91.294 90.8911 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN 92.2267 91.9172 91.5775 91.2031 90.7918 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 92.4452 92.1553 91.8379 91.4911 91.1101 90.6885 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 92.3781 92.0825 91.7591 91.4043 91.0144 90.5834 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 92.31 92.0089 91.6783 91.3163 90.9166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 92.2413 91.9324 91.5954 91.2255 90.8166 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 92.1696 91.8556 91.5111 91.1322 90.7147 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN 92.391 92.0976 91.7769 91.4248 91.0382 90.611 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN 92.0248 91.6966 91.3373 90.9417 90.5043 NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.94194 7.56511 7.19038 6.81626 6.44423 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8569 7.48093 7.10605 6.73281 6.36089 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.7724 7.39658 7.02215 6.64892 6.27782 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.06455 7.688 7.31247 6.93823 6.56551 6.19477 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.97983 7.60366 7.2287 6.85441 6.48197 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.8952 7.51946 7.14517 6.77071 6.39873 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.81105 7.43489 7.0603 6.68714 6.31558 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.72611 7.35074 6.97674 6.60389 6.23289 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN 8.01804 7.64172 7.26672 6.89282 6.52025 6.14959 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 7.55748 7.18239 6.80886 6.43657 NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.798 117.613 118.509 119.504 120.609 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.97 117.802 118.719 119.735 120.87 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.146 117.996 118.934 119.975 121.137 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.538 117.326 118.194 119.155 120.222 121.414 NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.706 117.511 118.397 119.38 120.474 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.877 117.699 118.604 119.612 120.733 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.05 117.892 118.82 119.85 120.998 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.231 118.089 119.037 120.092 121.27 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN 116.616 117.414 118.291 119.262 120.343 121.552 NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 117.601 118.499 119.492 120.6 NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.98397 5.60436 5.22629 4.85051 4.47749 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.89902 5.51979 5.14214 4.76699 4.39482 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.81385 5.43495 5.05811 4.68338 4.31176 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.10878 5.72912 5.35071 4.97413 4.59998 4.22933 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.02356 5.64376 5.26618 4.88984 4.51664 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.938 5.55897 5.1813 4.80571 4.43316 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.85332 5.47505 5.09703 4.72192 4.35017 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.14847 5.76823 5.38949 5.01237 4.63811 4.26692 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 6.06275 5.68347 5.30458 4.92804 4.55459 4.18407 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.9774 5.59788 5.21981 4.84402 4.47086 NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 5.51271 5.13498 4.7597 4.38749 NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.1015 86.1123 84.974 83.6538 82.1077 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.8857 85.8663 84.6903 83.3238 81.7192 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.6628 85.6105 84.3968 82.9801 81.3118 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.3809 86.4344 85.3483 84.0925 82.6251 80.8924 NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.1741 86.1977 85.0768 83.7748 82.2541 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.9589 85.9527 84.7944 83.4481 81.8691 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 86.7408 85.7045 84.5049 83.1103 81.4708 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.4494 86.5139 85.442 84.2014 82.7561 81.052 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.244 86.2812 85.1729 83.8909 82.3929 80.6189 NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 87.0343 86.0398 84.8955 83.5691 82.0132 NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 85.7916 84.6089 83.2341 81.6205 NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.27277 3.93031
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.19493 3.85385
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.11765 3.77876
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.38681 4.04091 3.70407
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.30823 3.96401 3.63007
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.22988 3.88788 3.55663
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.15225 3.8125 3.48381
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.4219 4.07481 3.73746 3.41201
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.34311 3.998 3.66286 3.34095
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.26482 3.92174 3.58929 3.27063
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 4.18686 3.84597 3.51627 3.20131
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 3.7708 3.44395 3.13291
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+ 5000
+ 5000
+
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.971 133.734
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.547 134.423
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 132.144 135.129
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.141 132.763 135.869
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.69 133.411 136.637
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.26 134.084 137.44
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.852 134.784 138.279
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 129.884 132.465 135.512 139.15
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.424 133.101 136.272 140.06
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 130.987 133.764 137.059 141.008
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 131.568 134.453 137.883 142.001
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN 135.169 138.743 143.037
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+ NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
+
+
+
+
+
+ 7.18024135920399
+ 106.255157702848
+
+
+ 7.22336464325122
+ 106.346078097961
+
+
+ 6.98729381785528
+ 105.765382381968
+
+
+ 6.92446640065506
+ 105.09376719949
+
+
+ 6.95791117837005
+ 105.44993173891
+
+
+ 6.99577507894955
+ 105.692478311655
+
+
+ 7.0231940118902
+ 105.844309043016
+
+
+ 7.05361232723533
+ 105.969629461909
+
+
+ 7.0871211819946
+ 106.101277617057
+
+
+ 6.93953882104395
+ 105.275888180279
+
+
+ 7.12343057570894
+ 106.21247453177
+
+
+ 7.04938612963508
+ 106.030547019406
+
+
+ 7.13282515906901
+ 106.31610702063
+
+
+
+
+
+
+
+ 0
+ 0
+
+
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B01.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B01.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B01.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B01.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B01.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B02.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B02.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B02.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B02.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B02.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B03.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B03.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B03.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B03.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B03.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B04.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B04.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B04.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B04.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B04.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B05.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B05.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B05.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B05.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B05.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B06.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B06.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B06.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B06.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B06.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B07.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B07.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B07.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B07.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B07.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B08.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B08.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B08.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B08.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B08.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B8A.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B8A.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B8A.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B8A.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B8A.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B09.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B09.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B09.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B09.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B09.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B10.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B10.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B10.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B10.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B10.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B11.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B11.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B11.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B11.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B11.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DEFECT_B12.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_DETFOO_B12.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_NODATA_B12.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_SATURA_B12.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_TECQUA_B12.gml
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/MSK_CLOUDS_B00.gml
+
+ GRANULE/L1C_T16SEB_A018656_20201001T162735/QI_DATA/T16SEB_20201001T162019_PVI.jp2
+
+
+
+""" # noqa
+
+
+mtd_l1c_old_xml = """
+
+
+
+ 2021-05-17T10:36:19.024Z
+ 2021-05-17T10:36:19.024Z
+ S2B_MSIL1C_20210517T103619_N7990_R008_T30QVE_20210929T075738.SAFE
+ Level-1C
+ S2MSI1C
+ 79.90
+ https://doi.org/10.5270/S2_-742ikth
+ 2021-09-29T07:57:38.000000Z
+ Not applicable
+ Not applicable
+
+ Sentinel-2B
+ INS-NOBS
+ 2021-05-17T10:36:19.024Z
+ 8
+ DESCENDING
+
+
+ SAFE_COMPACT
+
+
+
+
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B01
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B02
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B03
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B04
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B05
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B06
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B07
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B08
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B8A
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B09
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B10
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B11
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B12
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_TCI
+
+
+
+
+
+
+ NODATA
+ 0
+
+
+ SATURATED
+ 65535
+
+
+ 3
+ 2
+ 1
+
+ 10000
+
+ 0.979428313059035
+
+ 1874.3
+ 1959.75
+ 1824.93
+ 1512.79
+ 1425.78
+ 1291.13
+ 1175.57
+ 1041.28
+ 953.93
+ 817.58
+ 365.41
+ 247.08
+ 87.75
+
+
+
+
+ 60
+
+ 411
+ 456
+ 442.3
+
+
+ 1
+ 0.0062411 0.01024045 0.00402983 0.00642179 0.00552753 0.0065525 0.00409887 0.006297 0.00436742 0.00233356 0.00058162 0.00202276 0.00294328 0.00485362 0.00317041 0.00237657 0.00234612 0.00440152 0.01292397 0.05001678 0.18650104 0.45441623 0.72307877 0.83999211 0.86456334 0.87472096 0.89215296 0.91090814 0.92588017 0.93924094 0.94491826 0.95078529 0.96803023 0.99939195 1 0.97548364 0.96148351 0.94986211 0.91841452 0.87989802 0.80383677 0.59752075 0.30474132 0.10798014 0.0304465 0.00885119
+
+
+
+ 10
+
+ 456
+ 532
+ 492.3
+
+
+ 1
+ 0.05529541 0.12005068 0.25199051 0.4623617 0.65162379 0.77642171 0.82319091 0.83083116 0.83382106 0.837526 0.86304286 0.88226141 0.90486326 0.92043837 0.93602675 0.930533 0.92714067 0.9161479 0.90551724 0.89745515 0.90266694 0.90854264 0.92047913 0.92417935 0.91845025 0.90743244 0.89733983 0.88646415 0.87189983 0.85643973 0.84473414 0.84190734 0.85644111 0.87782724 0.90261174 0.91840544 0.94585847 0.96887192 0.99336135 0.99927899 1 0.99520325 0.98412711 0.97947473 0.97808297 0.97213439 0.96277794 0.95342234 0.93802376 0.92460144 0.90932642 0.90192251 0.89184298 0.88963556 0.89146958 0.89877911 0.91056869 0.92427362 0.93823555 0.95311791 0.97150808 0.98737003 0.99658514 0.99367959 0.98144714 0.95874415 0.89291635 0.73566218 0.52060373 0.3322804 0.19492197 0.11732617 0.07507304 0.05094154 0.03213016 0.01510217 0.00447984
+
+
+
+ 60
+
+ 1339
+ 1415
+ 1376.9
+
+
+ 1
+ 2.472e-05 0.00013691 0.00012558 8.901e-05 0.00012425 9.941e-05 0.00013952 0.00015816 0.00019272 0.00025959 0.00032221 0.00034719 0.0003699 0.00054874 0.00105434 0.00218813 0.00480743 0.01135252 0.02671185 0.05776022 0.11176337 0.19587518 0.31418191 0.46188068 0.62292578 0.7709851 0.88086652 0.9448941 0.97405066 0.98616696 0.99306955 0.99775441 1 0.99942348 0.99616891 0.99082045 0.9842131 0.97708513 0.97013647 0.96374366 0.95755001 0.95127438 0.94546638 0.94069659 0.93759595 0.93624612 0.93510206 0.93054472 0.91630845 0.88530334 0.83129653 0.74856466 0.63524397 0.49733159 0.34907723 0.21259735 0.10971453 0.04789269 0.01853013 0.00716776 0.0031533 0.00157017 0.00084901 0.00053006 0.00033171 0.00019447 0.00022104 0.00022646 0.00018156 0.00016063 0.00015475 0.00014734 0.00014776 0.00017405 0.00023619 0.00012007 4.337e-05
+
+
+
+ 3.97083657
+ 3.81081866
+ 4.21881648
+ 4.7545091
+ 5.16489535
+ 5.06418355
+ 4.7429031
+ 6.789537
+ 5.73223234
+ 9.32447797
+ 56.36387909
+ 37.15464608
+ 108.67071783
+ 3
+
+
+
+""" # noqa
+
+mtd_l1c_xml = """
+
+
+
+ 2021-05-17T10:36:19.024Z
+ 2021-05-17T10:36:19.024Z
+ S2B_MSIL1C_20210517T103619_N7990_R008_T30QVE_20210929T075738.SAFE
+ Level-1C
+ S2MSI1C
+ 79.90
+ https://doi.org/10.5270/S2_-742ikth
+ 2021-09-29T07:57:38.000000Z
+ Not applicable
+ Not applicable
+
+ Sentinel-2B
+ INS-NOBS
+ 2021-05-17T10:36:19.024Z
+ 8
+ DESCENDING
+
+
+ SAFE_COMPACT
+
+
+
+
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B01
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B02
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B03
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B04
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B05
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B06
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B07
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B08
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B8A
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B09
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B10
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B11
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_B12
+ GRANULE/L1C_T30QVE_A021913_20210517T104617/IMG_DATA/T30QVE_20210517T103619_TCI
+
+
+
+
+
+
+ NODATA
+ 0
+
+
+ SATURATED
+ 65535
+
+
+ 3
+ 2
+ 1
+
+ 10000
+
+ -1000
+ -1000
+ -1000
+ -1000
+ -1000
+ -1000
+ -1000
+ -1000
+ -1000
+ -1000
+ -2000
+ -1000
+ -1000
+
+
+ 0.979428313059035
+
+ 1874.3
+ 1959.75
+ 1824.93
+ 1512.79
+ 1425.78
+ 1291.13
+ 1175.57
+ 1041.28
+ 953.93
+ 817.58
+ 365.41
+ 247.08
+ 87.75
+
+
+
+
+ 60
+
+ 411
+ 456
+ 442.3
+
+
+ 1
+ 0.0062411 0.01024045 0.00402983 0.00642179 0.00552753 0.0065525 0.00409887 0.006297 0.00436742 0.00233356 0.00058162 0.00202276 0.00294328 0.00485362 0.00317041 0.00237657 0.00234612 0.00440152 0.01292397 0.05001678 0.18650104 0.45441623 0.72307877 0.83999211 0.86456334 0.87472096 0.89215296 0.91090814 0.92588017 0.93924094 0.94491826 0.95078529 0.96803023 0.99939195 1 0.97548364 0.96148351 0.94986211 0.91841452 0.87989802 0.80383677 0.59752075 0.30474132 0.10798014 0.0304465 0.00885119
+
+
+
+ 10
+
+ 456
+ 532
+ 492.3
+
+
+ 1
+ 0.05529541 0.12005068 0.25199051 0.4623617 0.65162379 0.77642171 0.82319091 0.83083116 0.83382106 0.837526 0.86304286 0.88226141 0.90486326 0.92043837 0.93602675 0.930533 0.92714067 0.9161479 0.90551724 0.89745515 0.90266694 0.90854264 0.92047913 0.92417935 0.91845025 0.90743244 0.89733983 0.88646415 0.87189983 0.85643973 0.84473414 0.84190734 0.85644111 0.87782724 0.90261174 0.91840544 0.94585847 0.96887192 0.99336135 0.99927899 1 0.99520325 0.98412711 0.97947473 0.97808297 0.97213439 0.96277794 0.95342234 0.93802376 0.92460144 0.90932642 0.90192251 0.89184298 0.88963556 0.89146958 0.89877911 0.91056869 0.92427362 0.93823555 0.95311791 0.97150808 0.98737003 0.99658514 0.99367959 0.98144714 0.95874415 0.89291635 0.73566218 0.52060373 0.3322804 0.19492197 0.11732617 0.07507304 0.05094154 0.03213016 0.01510217 0.00447984
+
+
+
+ 60
+
+ 1339
+ 1415
+ 1376.9
+
+
+ 1
+ 2.472e-05 0.00013691 0.00012558 8.901e-05 0.00012425 9.941e-05 0.00013952 0.00015816 0.00019272 0.00025959 0.00032221 0.00034719 0.0003699 0.00054874 0.00105434 0.00218813 0.00480743 0.01135252 0.02671185 0.05776022 0.11176337 0.19587518 0.31418191 0.46188068 0.62292578 0.7709851 0.88086652 0.9448941 0.97405066 0.98616696 0.99306955 0.99775441 1 0.99942348 0.99616891 0.99082045 0.9842131 0.97708513 0.97013647 0.96374366 0.95755001 0.95127438 0.94546638 0.94069659 0.93759595 0.93624612 0.93510206 0.93054472 0.91630845 0.88530334 0.83129653 0.74856466 0.63524397 0.49733159 0.34907723 0.21259735 0.10971453 0.04789269 0.01853013 0.00716776 0.0031533 0.00157017 0.00084901 0.00053006 0.00033171 0.00019447 0.00022104 0.00022646 0.00018156 0.00016063 0.00015475 0.00014734 0.00014776 0.00017405 0.00023619 0.00012007 4.337e-05
+
+
+
+ 3.97083657
+ 3.81081866
+ 4.21881648
+ 4.7545091
+ 5.16489535
+ 5.06418355
+ 4.7429031
+ 6.789537
+ 5.73223234
+ 9.32447797
+ 56.36387909
+ 37.15464608
+ 108.67071783
+ 3
+
+
+
+""" # noqa
+
+
+class TestMTDXML(unittest.TestCase):
+ """Test the SAFE MTD XML file handler."""
+
+ def setUp(self):
+ """Set up the test case."""
+ from satpy.readers.msi_safe import SAFEMSIMDXML, SAFEMSITileMDXML
+ filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A")
+ self.xml_tile_fh = SAFEMSITileMDXML(BytesIO(mtd_tile_xml), filename_info, mock.MagicMock())
+ self.old_xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_old_xml), filename_info, mock.MagicMock())
+ self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=True)
+
+ def test_satellite_zenith_array(self):
+ """Test reading the satellite zenith array."""
+ info = dict(xml_tag="Viewing_Incidence_Angles_Grids", xml_item="Zenith")
+
+ expected_data = np.array([[11.7128, 11.18397802, 10.27667671, 9.35384969, 8.42850504,
+ 7.55445611, 6.65475545, 5.66517232, 4.75893757, 4.04976844],
+ [11.88606009, 10.9799713, 10.07083278, 9.14571825, 8.22607131,
+ 7.35181457, 6.44647222, 5.46144173, 4.56625547, 3.86638233],
+ [11.6823579, 10.7763071, 9.86302106, 8.93879112, 8.04005637,
+ 7.15028077, 6.21461062, 5.25780953, 4.39876601, 3.68620793],
+ [11.06724679, 10.35723901, 9.63958896, 8.73072512, 7.83680864,
+ 6.94792574, 5.9889201, 5.05445872, 4.26089708, 3.50984272],
+ [6.28411038, 6.28411038, 6.28411038, 6.28411038, 6.28411038,
+ 5.99769643, 5.62586167, 4.85165966, 4.13238314, 3.33781401],
+ [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+ 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+ [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+ 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+ [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+ 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+ [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+ 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837],
+ [3.7708, 3.7708, 3.7708, 3.7708, 3.7708,
+ 3.7708, 3.7708, 3.7708, 3.7708, 3.24140837]])
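+        # Subsample every 200th pixel so the interpolated angle grid matches the 10x10 expected array.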
+ res = self.xml_tile_fh.get_dataset(make_dataid(name="satellite_zenith_angle",
+ resolution=60),
+ info)[::200, ::200]
+ np.testing.assert_allclose(res, expected_data)
+
+ def test_old_xml_calibration(self):
+ """Test the calibration of older data formats (no offset)."""
+ fake_data = xr.DataArray([[[0, 1, 2, 3],
+ [4, 1000, 65534, 65535]]],
+ dims=["band", "x", "y"])
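+        # Without a radiometric offset, reflectance is 100 * counts / 10000: count 0 is the
+        # nodata fill (NaN) and 65535 the saturated value, masked to inf.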
+ result = self.old_xml_fh.calibrate_to_reflectances(fake_data, "B01")
+ np.testing.assert_allclose(result, [[[np.nan, 0.01, 0.02, 0.03],
+ [0.04, 10, 655.34, np.inf]]])
+
+ def test_xml_calibration(self):
+ """Test the calibration with radiometric offset."""
+ fake_data = xr.DataArray([[[0, 1, 2, 3],
+ [4, 1000, 65534, 65535]]],
+ dims=["band", "x", "y"])
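+        # The newer format carries a RADIO_ADD_OFFSET of -1000, shifting all reflectances by -10 %.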
+ result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01")
+ np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10],
+ [0.04 - 10, 0, 655.34 - 10, np.inf]]])
+
+ def test_xml_calibration_unmasked_saturated(self):
+ """Test the calibration with radiometric offset but unmasked saturated pixels."""
+ from satpy.readers.msi_safe import SAFEMSIMDXML
+ filename_info = dict(observation_time=None, dtile_number=None, fmission_id="S2A")
+ self.xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock(), mask_saturated=False)
+
+ fake_data = xr.DataArray([[[0, 1, 2, 3],
+ [4, 1000, 65534, 65535]]],
+ dims=["band", "x", "y"])
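+        # With mask_saturated=False, the saturated count 65535 is calibrated like any other value.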
+ result = self.xml_fh.calibrate_to_reflectances(fake_data, "B01")
+ np.testing.assert_allclose(result, [[[np.nan, 0.01 - 10, 0.02 - 10, 0.03 - 10],
+ [0.04 - 10, 0, 655.34 - 10, 655.35 - 10]]])
+
+ def test_xml_calibration_with_different_offset(self):
+ """Test the calibration with a different offset."""
+ fake_data = xr.DataArray([[[0, 1, 2, 3],
+ [4, 1000, 65534, 65535]]],
+ dims=["band", "x", "y"])
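+        # B10 uses a different offset (-2000), shifting its reflectances by -20 %.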
+ result = self.xml_fh.calibrate_to_reflectances(fake_data, "B10")
+ np.testing.assert_allclose(result, [[[np.nan, 0.01 - 20, 0.02 - 20, 0.03 - 20],
+ [0.04 - 20, -10, 655.34 - 20, np.inf]]])
+
+ def test_xml_calibration_to_radiance(self):
+ """Test the calibration with a different offset."""
+ fake_data = xr.DataArray([[[0, 1, 2, 3],
+ [4, 1000, 65534, 65535]]],
+ dims=["band", "x", "y"])
+ result = self.xml_fh.calibrate_to_radiances(fake_data, "B01")
+ expected = np.array([[[np.nan, -251.584265, -251.332429, -251.080593],
+ [-250.828757, 0., 16251.99095, np.inf]]])
+ np.testing.assert_allclose(result, expected)
+
+ def test_xml_navigation(self):
+ """Test the navigation."""
+ from pyproj import CRS
+ crs = CRS('EPSG:32616')
+
+ dsid = make_dataid(name="B01", resolution=60)
+ result = self.xml_tile_fh.get_area_def(dsid)
+
+ area_extents = (499980.0, 3590220.0, 609780.0, 3700020.0)
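+        # At 60 m resolution the 1830 x 1830 pixel tile spans 109800 m in each direction.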
+ assert result.crs == crs
+ np.testing.assert_allclose(result.area_extent, area_extents)
+
+
+class TestSAFEMSIL1C:
+ """Test case for image reading (jp2k)."""
+
+ def setup(self):
+ """Set up the test."""
+ from satpy.readers.msi_safe import SAFEMSITileMDXML
+ self.filename_info = dict(observation_time=None, fmission_id="S2A", band_name="B01", dtile_number=None)
+ self.fake_data = xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])
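+        # Autospec the tile metadata handler so the mock exposes the real class interface.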
+ self.tile_mda = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(mtd_tile_xml),
+ self.filename_info, mock.MagicMock())
+
+ @pytest.mark.parametrize("mask_saturated,calibration,expected",
+ [(True, "reflectance", [[np.nan, 0.01 - 10], [645.34, np.inf]]),
+ (False, "reflectance", [[np.nan, 0.01 - 10], [645.34, 645.35]]),
+ (True, "radiance", [[np.nan, -251.58426503], [16251.99095011, np.inf]])])
+ def test_calibration_and_masking(self, mask_saturated, calibration, expected):
+ """Test that saturated is masked with inf when requested and that calibration is performed."""
+ from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSIMDXML
+
+ mda = SAFEMSIMDXML(StringIO(mtd_l1c_xml), self.filename_info, mock.MagicMock(),
+ mask_saturated=mask_saturated)
+ self.jp2_fh = SAFEMSIL1C("somefile", self.filename_info, mock.MagicMock(), mda, self.tile_mda)
+
+ with mock.patch("satpy.readers.msi_safe.rioxarray.open_rasterio", return_value=self.fake_data):
+ res = self.jp2_fh.get_dataset(make_dataid(name="B01", calibration=calibration), info=dict())
+ np.testing.assert_allclose(res, expected)
diff --git a/satpy/tests/reader_tests/test_msu_gsa_l1b.py b/satpy/tests/reader_tests/test_msu_gsa_l1b.py
new file mode 100644
index 0000000000..bfb51d7873
--- /dev/null
+++ b/satpy/tests/reader_tests/test_msu_gsa_l1b.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for the 'msu_gsa_l1b' reader."""
+import os
+from unittest import mock
+
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+
+from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
+from satpy.tests.utils import make_dataid
+
+SOLCONST = '273.59'
+
+
+class FakeHDF5FileHandler2(FakeHDF5FileHandler):
+ """Swap-in HDF5 File Handler."""
+
+ def _get_data(self, num_scans, num_cols):
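+        """Create fake 1 km and 4 km resolution datasets; the 1 km grids are four times larger in each dimension."""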
+ data = {
+ 'Data/resolution_1km/Solar_Zenith_Angle':
+ xr.DataArray(
+ da.ones((num_scans*4, num_cols*4), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ 'Geolocation/resolution_1km/Latitude':
+ xr.DataArray(
+ da.ones((num_scans*4, num_cols*4), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ 'Geolocation/resolution_1km/Longitude':
+ xr.DataArray(
+ da.ones((num_scans*4, num_cols*4), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ 'Data/resolution_1km/Radiance_01':
+ xr.DataArray(
+ da.ones((num_scans*4, num_cols*4), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999., 'F_solar_constant': SOLCONST
+ },
+ dims=('x', 'y')),
+ 'Data/resolution_4km/Solar_Zenith_Angle':
+ xr.DataArray(
+ da.ones((num_scans, num_cols), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ 'Geolocation/resolution_4km/Latitude':
+ xr.DataArray(
+ da.ones((num_scans, num_cols), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ 'Geolocation/resolution_4km/Longitude':
+ xr.DataArray(
+ da.ones((num_scans, num_cols), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ 'Data/resolution_4km/Brightness_Temperature_09':
+ xr.DataArray(
+ da.ones((num_scans, num_cols), chunks=1024,
+ dtype=np.uint16),
+ attrs={
+ 'scale': 0.01, 'offset': 0., 'fill_value': -999.
+ },
+ dims=('x', 'y')),
+ }
+ return data
+
+ def get_test_content(self, filename, filename_info, filetype_info):
+ """Mimic reader input file content."""
+ num_scans = 20
+ num_cols = 2048
+ global_attrs = {
+ '/attr/timestamp_without_timezone': '2022-01-13T12:45:00',
+ '/attr/satellite_observation_point_height': '38500.0',
+ '/attr/satellite_observation_point_latitude': '71.25',
+ '/attr/satellite_observation_point_longitude': '21.44',
+ }
+
+ data = self._get_data(num_scans, num_cols)
+
+ test_content = {}
+ test_content.update(global_attrs)
+ test_content.update(data)
+ return test_content
+
+
+class TestMSUGSABReader:
+ """Test MSU GS/A L1B Reader."""
+
+ yaml_file = "msu_gsa_l1b.yaml"
+
+ def setup(self):
+ """Wrap HDF5 file handler with our own fake handler."""
+ from satpy._config import config_search_paths
+ from satpy.readers import load_reader
+ from satpy.readers.msu_gsa_l1b import MSUGSAFileHandler
+ self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+ # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
+ self.p = mock.patch.object(MSUGSAFileHandler, '__bases__', (FakeHDF5FileHandler2,))
+ self.fake_handler = self.p.start()
+ self.p.is_local = True
+
+ filenames = ['ArcticaM1_202201131245.h5']
+ self.reader = load_reader(self.reader_configs)
+ files = self.reader.select_files_from_pathnames(filenames)
+ self.reader.create_filehandlers(files)
+
+ def teardown(self):
+ """Stop wrapping the HDF5 file handler."""
+ self.p.stop()
+
+ def test_irbt(self):
+ """Test retrieval in brightness temperature."""
+ ds_ids = [make_dataid(name='C09', calibration='brightness_temperature')]
+ res = self.reader.load(ds_ids)
+ assert 'C09' in res
+ assert res['C09'].attrs['calibration'] == 'brightness_temperature'
+ assert res['C09'].attrs['platform_name'] == 'Arctica-M-N1'
+ assert res['C09'].attrs['sat_latitude'] == 71.25
+ assert res['C09'].attrs['sat_longitude'] == 21.44
+ assert res['C09'].attrs['sat_altitude'] == 38500.
+ assert res['C09'].attrs['resolution'] == 4000
+
+ def test_nocounts(self):
+ """Test we can't get IR or VIS data as counts."""
+ ds_ids = [make_dataid(name='C01', calibration='counts')]
+ with pytest.raises(KeyError):
+ self.reader.load(ds_ids)
+
+ ds_ids = [make_dataid(name='C09', calibration='counts')]
+ with pytest.raises(KeyError):
+ self.reader.load(ds_ids)
+
+ def test_vis_cal(self):
+ """Test that we can retrieve VIS data as both radiance and reflectance."""
+ ds_ids = [make_dataid(name='C01', calibration='radiance')]
+ res = self.reader.load(ds_ids)
+ rad = res['C01'].data
+ ds_ids = [make_dataid(name='C01', calibration='reflectance')]
+ res = self.reader.load(ds_ids)
+ refl = res['C01'].data
+
+ # Check the RAD->REFL conversion
+ np.testing.assert_allclose(100 * np.pi * rad / float(SOLCONST), refl)
diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py
index 8b15127dee..1c9f2d4d80 100644
--- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py
+++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py
@@ -17,6 +17,8 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the FIDUCEO MVIRI FCDR Reader."""
+from __future__ import annotations
+
import os
from unittest import mock
@@ -28,12 +30,16 @@
from pyresample.utils import proj4_radius_parameters
from satpy.readers.mviri_l1b_fiduceo_nc import (
- ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, FiduceoMviriEasyFcdrFileHandler,
- FiduceoMviriFullFcdrFileHandler, DatasetWrapper
+ ALTITUDE,
+ EQUATOR_RADIUS,
+ POLE_RADIUS,
+ DatasetWrapper,
+ FiduceoMviriEasyFcdrFileHandler,
+ FiduceoMviriFullFcdrFileHandler,
)
from satpy.tests.utils import make_dataid
-attrs_exp = {
+attrs_exp: dict = {
'platform': 'MET7',
'raw_metadata': {'foo': 'bar'},
'sensor': 'MVIRI',
@@ -109,6 +115,9 @@
dtype=np.float32
),
dims=('y', 'x'),
+ coords={
+ 'acq_time': ('y', acq_time_vis_exp),
+ },
attrs=attrs_exp
)
acq_time_ir_wv_exp = [np.datetime64('1970-01-01 00:30'),
@@ -194,6 +203,9 @@
dtype=np.uint8
),
dims=('y', 'x'),
+ coords={
+ 'acq_time': ('y', acq_time_vis_exp),
+ },
attrs=attrs_exp
)
sza_vis_exp = xr.DataArray(
@@ -267,10 +279,8 @@ def fixture_fake_dataset():
'count_vis': (('y', 'x'), count_vis),
'count_wv': (('y_ir_wv', 'x_ir_wv'), count_wv),
'count_ir': (('y_ir_wv', 'x_ir_wv'), count_ir),
- 'toa_bidirectional_reflectance_vis': (
- ('y', 'x'), vis_refl_exp / 100),
- 'u_independent_toa_bidirectional_reflectance': (
- ('y', 'x'), u_vis_refl_exp / 100),
+ 'toa_bidirectional_reflectance_vis': vis_refl_exp / 100,
+ 'u_independent_toa_bidirectional_reflectance': u_vis_refl_exp / 100,
'quality_pixel_bitmask': (('y', 'x'), mask),
'solar_zenith_angle': (('y_tie', 'x_tie'), sza),
'time_ir_wv': (('y_ir_wv', 'x_ir_wv'), time),
diff --git a/satpy/tests/reader_tests/test_mws_l1b_nc.py b/satpy/tests/reader_tests/test_mws_l1b_nc.py
new file mode 100644
index 0000000000..bca79df8ad
--- /dev/null
+++ b/satpy/tests/reader_tests/test_mws_l1b_nc.py
@@ -0,0 +1,400 @@
+# Copyright (c) 2022 Pytroll Developers
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""The mws_l1b_nc reader tests.
+
+This module tests the reading of the MWS l1b netCDF format data as per version v4B issued 22 November 2021.
+
+"""
+
+import logging
+from datetime import datetime
+from unittest.mock import patch
+
+import numpy as np
+import pytest
+import xarray as xr
+from netCDF4 import Dataset
+
+from satpy.readers.mws_l1b import MWSL1BFile, get_channel_index_from_name
+
+N_CHANNELS = 24
+N_CHANNELS_OS = 2
+N_SCANS = 2637
+N_FOVS = 95
+N_FOVS_CAL = 5
+N_PRTS = 6
+
+
+@pytest.fixture
+def reader(fake_file):
+ """Return reader of mws level-1b data."""
+ return MWSL1BFile(
+ filename=fake_file,
+ filename_info={
+ 'start_time': (
+ datetime.fromisoformat('2000-01-01T01:00:00')
+ ),
+ 'end_time': (
+ datetime.fromisoformat('2000-01-01T02:00:00')
+ ),
+ 'creation_time': (
+ datetime.fromisoformat('2000-01-01T03:00:00')
+ ),
+ },
+ filetype_info={
+ 'longitude': 'data/navigation_data/mws_lon',
+ 'latitude': 'data/navigation_data/mws_lat',
+ 'solar_azimuth': 'data/navigation/mws_solar_azimuth_angle',
+ 'solar_zenith': 'data/navigation/mws_solar_zenith_angle',
+ 'satellite_azimuth': 'data/navigation/mws_satellite_azimuth_angle',
+ 'satellite_zenith': 'data/navigation/mws_satellite_zenith_angle',
+ }
+ )
+
+
+@pytest.fixture
+def fake_file(tmp_path):
+ """Return file path to level-1b file."""
+ file_path = tmp_path / 'test_file_mws_l1b.nc'
+ writer = MWSL1BFakeFileWriter(file_path)
+ writer.write()
+ yield file_path
+
+
+class MWSL1BFakeFileWriter:
+ """Writer class of fake mws level-1b data."""
+
+ def __init__(self, file_path):
+ """Init."""
+ self.file_path = file_path
+
+ def write(self):
+ """Write fake data to file."""
+ with Dataset(self.file_path, 'w') as dataset:
+ self._write_attributes(dataset)
+ self._write_status_group(dataset)
+ self._write_quality_group(dataset)
+ data_group = dataset.createGroup('data')
+ self._create_scan_dimensions(data_group)
+ self._write_navigation_data_group(data_group)
+ self._write_calibration_data_group(data_group)
+ self._write_measurement_data_group(data_group)
+
+ @staticmethod
+ def _write_attributes(dataset):
+ """Write attributes."""
+ dataset.sensing_start_time_utc = "2000-01-02 03:04:05.000"
+ dataset.sensing_end_time_utc = "2000-01-02 04:05:06.000"
+ dataset.instrument = "MWS"
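+        # The reader maps the spacecraft code "SGA1" to the platform name "Metop-SG-A1".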
+ dataset.spacecraft = "SGA1"
+
+ @staticmethod
+ def _write_status_group(dataset):
+ """Write the status group."""
+ group = dataset.createGroup('/status/satellite')
+ subsat_latitude_start = group.createVariable(
+ 'subsat_latitude_start', "f4"
+ )
+ subsat_latitude_start[:] = 52.19
+
+ subsat_longitude_start = group.createVariable(
+ 'subsat_longitude_start', "f4"
+ )
+ subsat_longitude_start[:] = 23.26
+
+ subsat_latitude_end = group.createVariable(
+ 'subsat_latitude_end', "f4"
+ )
+ subsat_latitude_end[:] = 60.00
+
+ subsat_longitude_end = group.createVariable(
+ 'subsat_longitude_end', "f4"
+ )
+ subsat_longitude_end[:] = 2.47
+
+ @staticmethod
+ def _write_quality_group(dataset):
+ """Write the quality group."""
+ group = dataset.createGroup('quality')
+ group.overall_quality_flag = 0
+ duration_of_product = group.createVariable(
+ 'duration_of_product', "f4"
+ )
+ duration_of_product[:] = 5944.
+
+ @staticmethod
+ def _write_navigation_data_group(dataset):
+ """Write the navigation data group."""
+ group = dataset.createGroup('navigation')
+ dimensions = ('n_scans', 'n_fovs')
+ shape = (N_SCANS, N_FOVS)
+ longitude = group.createVariable(
+ 'mws_lon',
+ np.int32,
+ dimensions=dimensions,
+ )
+ longitude.scale_factor = 1.0E-4
+ longitude.add_offset = 0.0
+ longitude.missing_value = np.array((-2147483648), np.int32)
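+        # Longitudes are packed into scaled int32 (factor 1e-4) on write, exercising the reader's unscaling.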
+ longitude[:] = 35.7535 * np.ones(shape)
+
+ latitude = group.createVariable(
+ 'mws_lat',
+ np.float32,
+ dimensions=dimensions,
+ )
+ latitude[:] = 2. * np.ones(shape)
+
+ azimuth = group.createVariable(
+ 'mws_solar_azimuth_angle',
+ np.float32,
+ dimensions=dimensions,
+ )
+ azimuth[:] = 179. * np.ones(shape)
+
+ @staticmethod
+ def _create_scan_dimensions(dataset):
+ """Create the scan/fovs dimensions."""
+ dataset.createDimension('n_channels', N_CHANNELS)
+ dataset.createDimension('n_channels_os', N_CHANNELS_OS)
+ dataset.createDimension('n_scans', N_SCANS)
+ dataset.createDimension('n_fovs', N_FOVS)
+ dataset.createDimension('n_prts', N_PRTS)
+ dataset.createDimension('n_fovs_cal', N_FOVS_CAL)
+
+ @staticmethod
+ def _write_calibration_data_group(dataset):
+ """Write the calibration data group."""
+ group = dataset.createGroup('calibration')
+ toa_bt = group.createVariable(
+ 'mws_toa_brightness_temperature', np.float32, dimensions=('n_scans', 'n_fovs', 'n_channels',)
+ )
+ toa_bt.scale_factor = 1.0 # 1.0E-8
+ toa_bt.add_offset = 0.0
+ toa_bt.missing_value = -2147483648
+ toa_bt[:] = 240.0 * np.ones((N_SCANS, N_FOVS, N_CHANNELS))
+
+ @staticmethod
+ def _write_measurement_data_group(dataset):
+ """Write the measurement data group."""
+ group = dataset.createGroup('measurement')
+ counts = group.createVariable(
+ 'mws_earth_view_counts', np.int32, dimensions=('n_scans', 'n_fovs', 'n_channels',)
+ )
+ counts[:] = 24100 * np.ones((N_SCANS, N_FOVS, N_CHANNELS), dtype=np.int32)
+
+
+class TestMwsL1bNCFileHandler:
+ """Test the MWSL1BFile reader."""
+
+ def test_start_time(self, reader):
+ """Test acquiring the start time."""
+ assert reader.start_time == datetime(2000, 1, 2, 3, 4, 5)
+
+ def test_end_time(self, reader):
+ """Test acquiring the end time."""
+ assert reader.end_time == datetime(2000, 1, 2, 4, 5, 6)
+
+ def test_sensor(self, reader):
+ """Test sensor."""
+ assert reader.sensor == "MWS"
+
+ def test_platform_name(self, reader):
+ """Test getting the platform name."""
+ assert reader.platform_name == "Metop-SG-A1"
+
+ def test_sub_satellite_longitude_start(self, reader):
+ """Test getting the longitude of sub-satellite point at start of the product."""
+ np.testing.assert_allclose(reader.sub_satellite_longitude_start, 23.26)
+
+ def test_sub_satellite_latitude_start(self, reader):
+ """Test getting the latitude of sub-satellite point at start of the product."""
+ np.testing.assert_allclose(reader.sub_satellite_latitude_start, 52.19)
+
+ def test_sub_satellite_longitude_end(self, reader):
+ """Test getting the longitude of sub-satellite point at end of the product."""
+ np.testing.assert_allclose(reader.sub_satellite_longitude_end, 2.47)
+
+ def test_sub_satellite_latitude_end(self, reader):
+ """Test getting the latitude of sub-satellite point at end of the product."""
+ np.testing.assert_allclose(reader.sub_satellite_latitude_end, 60.0)
+
+ def test_get_dataset_get_channeldata_counts(self, reader):
+ """Test getting channel data."""
+ dataset_id = {'name': '1', 'units': None,
+ 'calibration': 'counts'}
+ dataset_info = {'file_key': 'data/measurement/mws_earth_view_counts'}
+
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+ expected_bt = np.array([[24100, 24100],
+ [24100, 24100]], dtype=np.int32)
+ count = dataset[10:12, 12:14].data.compute()
+ np.testing.assert_allclose(count, expected_bt)
+
+ def test_get_dataset_get_channeldata_bts(self, reader):
+ """Test getting channel data."""
+ dataset_id = {'name': '1', 'units': 'K',
+ 'calibration': 'brightness_temperature'}
+ dataset_info = {'file_key': 'data/calibration/mws_toa_brightness_temperature'}
+
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+
+ expected_bt = np.array([[240., 240., 240., 240., 240.],
+ [240., 240., 240., 240., 240.],
+ [240., 240., 240., 240., 240.],
+ [240., 240., 240., 240., 240.],
+ [240., 240., 240., 240., 240.]], dtype=np.float32)
+
+ toa_bt = dataset[0:5, 0:5].data.compute()
+ np.testing.assert_allclose(toa_bt, expected_bt)
+
+ def test_get_dataset_return_none_if_data_not_exist(self, reader):
+ """Test get dataset return none if data does not exist."""
+ dataset_id = {'name': 'unknown'}
+ dataset_info = {'file_key': 'non/existing/data'}
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+ assert dataset is None
+
+ def test_get_navigation_longitudes(self, caplog, fake_file, reader):
+ """Test get the longitudes."""
+ dataset_id = {'name': 'mws_lon'}
+ dataset_info = {'file_key': 'data/navigation_data/mws_lon'}
+
+ dataset = reader.get_dataset(dataset_id, dataset_info)
+
+ expected_lons = np.array([[35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
+ [35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
+ [35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
+ [35.753498, 35.753498, 35.753498, 35.753498, 35.753498],
+ [35.753498, 35.753498, 35.753498, 35.753498, 35.753498]], dtype=np.float32)
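+        # The packed 35.7535 degrees comes back with float32 rounding as 35.753498.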
+
+ longitudes = dataset[0:5, 0:5].data.compute()
+ np.testing.assert_allclose(longitudes, expected_lons)
+
+ def test_get_dataset_logs_debug_message(self, caplog, fake_file, reader):
+ """Test get dataset return none if data does not exist."""
+ dataset_id = {'name': 'mws_lon'}
+ dataset_info = {'file_key': 'data/navigation_data/mws_lon'}
+
+ with caplog.at_level(logging.DEBUG):
+ _ = reader.get_dataset(dataset_id, dataset_info)
+
+ log_output = "Reading mws_lon from {filename}".format(filename=str(fake_file))
+ assert log_output in caplog.text
+
+ def test_get_dataset_aux_data_not_supported(self, reader):
+ """Test get auxillary dataset not supported."""
+ dataset_id = {'name': 'scantime_utc'}
+ dataset_info = {'file_key': 'non/existing'}
+
+ with pytest.raises(NotImplementedError) as exec_info:
+ _ = reader.get_dataset(dataset_id, dataset_info)
+
+ assert str(exec_info.value) == "Dataset 'scantime_utc' not supported!"
+
+ def test_get_dataset_aux_data_expected_data_missing(self, caplog, reader):
+ """Test get auxillary dataset which is not present but supposed to be in file."""
+ dataset_id = {'name': 'surface_type'}
+ dataset_info = {'file_key': 'non/existing'}
+
+ with caplog.at_level(logging.ERROR):
+ with pytest.raises(KeyError) as exec_info:
+ _ = reader.get_dataset(dataset_id, dataset_info)
+
+ assert str(exec_info.value) == "'data/navigation/mws_surface_type'"
+
+ log_output = ("Could not find key data/navigation/mws_surface_type in NetCDF file," +
+ " no valid Dataset created")
+ assert log_output in caplog.text
+
+ @pytest.mark.parametrize('dims', (
+ ('n_scans', 'n_fovs'),
+ ('x', 'y'),
+ ))
+ def test_standardize_dims(self, reader, dims):
+ """Test standardize dims."""
+ variable = xr.DataArray(
+ np.arange(6).reshape(2, 3),
+ dims=dims,
+ )
+ standardized = reader._standardize_dims(variable)
+ assert standardized.dims == ('y', 'x')
+
+ @staticmethod
+ def test_drop_coords(reader):
+ """Test drop coordinates."""
+ coords = "dummy"
+ data = xr.DataArray(
+ np.ones(10),
+ dims=('y'),
+ coords={coords: 0},
+ )
+ assert coords in data.coords
+ data = reader._drop_coords(data)
+ assert coords not in data.coords
+
+ def test_get_global_attributes(self, reader):
+ """Test get global attributes."""
+ attributes = reader._get_global_attributes()
+ assert attributes == {
+ 'filename': reader.filename,
+ 'start_time': datetime(2000, 1, 2, 3, 4, 5),
+ 'end_time': datetime(2000, 1, 2, 4, 5, 6),
+ 'spacecraft_name': 'Metop-SG-A1',
+ 'sensor': 'MWS',
+ 'filename_start_time': datetime(2000, 1, 1, 1, 0),
+ 'filename_end_time': datetime(2000, 1, 1, 2, 0),
+ 'platform_name': 'Metop-SG-A1',
+ 'quality_group': {
+ 'duration_of_product': np.array(5944., dtype=np.float32),
+ 'overall_quality_flag': 0,
+ }
+ }
+
+ @patch(
+ 'satpy.readers.mws_l1b.MWSL1BFile._get_global_attributes',
+ return_value={"mocked_global_attributes": True},
+ )
+ def test_manage_attributes(self, mock, reader):
+ """Test manage attributes."""
+ variable = xr.DataArray(
+ np.ones(N_SCANS),
+ attrs={"season": "summer"},
+ )
+ dataset_info = {'name': '1', 'units': 'K'}
+ variable = reader._manage_attributes(variable, dataset_info)
+ assert variable.attrs == {
+ 'season': 'summer',
+ 'units': 'K',
+ 'name': '1',
+ 'mocked_global_attributes': True,
+ }
+
+
+@pytest.mark.parametrize("name, index", [('1', 0), ('2', 1), ('24', 23)])
+def test_get_channel_index_from_name(name, index):
+ """Test getting the MWS channel index from the channel name."""
+ ch_idx = get_channel_index_from_name(name)
+ assert ch_idx == index
+
+
+def test_get_channel_index_from_name_throw_exception():
+ """Test that an excpetion is thrown when getting the MWS channel index from an unsupported name."""
+ with pytest.raises(Exception) as excinfo:
+ _ = get_channel_index_from_name('channel 1')
+
+ assert str(excinfo.value) == "Channel name 'channel 1' not supported"
+ assert excinfo.type == AttributeError
diff --git a/satpy/tests/reader_tests/test_netcdf_utils.py b/satpy/tests/reader_tests/test_netcdf_utils.py
index 317522d8ef..9c1af32830 100644
--- a/satpy/tests/reader_tests/test_netcdf_utils.py
+++ b/satpy/tests/reader_tests/test_netcdf_utils.py
@@ -19,13 +19,14 @@
import os
import unittest
+
import numpy as np
try:
from satpy.readers.netcdf_utils import NetCDF4FileHandler
except ImportError:
# fake the import so we can at least run the tests in this file
- NetCDF4FileHandler = object
+ NetCDF4FileHandler = object # type: ignore
class FakeNetCDF4FileHandler(NetCDF4FileHandler):
@@ -121,8 +122,9 @@ def tearDown(self):
def test_all_basic(self):
"""Test everything about the NetCDF4 class."""
- from satpy.readers.netcdf_utils import NetCDF4FileHandler
import xarray as xr
+
+ from satpy.readers.netcdf_utils import NetCDF4FileHandler
file_handler = NetCDF4FileHandler('test.nc', {}, {})
self.assertEqual(file_handler['/dimension/rows'], 10)
@@ -145,6 +147,14 @@ def test_all_basic(self):
self.assertEqual(file_handler['/attr/test_attr_int'], 0)
self.assertEqual(file_handler['/attr/test_attr_float'], 1.2)
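+        # The new '/attrs' key collects all global attributes into a single dictionary.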
+ global_attrs = {
+ 'test_attr_str': 'test_string',
+ 'test_attr_str_arr': 'test_string2',
+ 'test_attr_int': 0,
+ 'test_attr_float': 1.2
+ }
+ self.assertEqual(file_handler['/attrs'], global_attrs)
+
self.assertIsInstance(file_handler.get('ds2_f')[:], xr.DataArray)
self.assertIsNone(file_handler.get('fake_ds'))
self.assertEqual(file_handler.get('fake_ds', 'test'), 'test')
diff --git a/satpy/tests/reader_tests/test_nucaps.py b/satpy/tests/reader_tests/test_nucaps.py
index 74072c69b4..2f7b0c97a5 100644
--- a/satpy/tests/reader_tests/test_nucaps.py
+++ b/satpy/tests/reader_tests/test_nucaps.py
@@ -17,15 +17,16 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.nucaps module."""
+import datetime
import os
import unittest
-import datetime
from unittest import mock
+
import numpy as np
+
from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
-
DEFAULT_FILE_DTYPE = np.float32
DEFAULT_FILE_SHAPE = (120,)
DEFAULT_PRES_FILE_SHAPE = (120, 100,)
@@ -213,7 +214,7 @@ def test_load_nonpressure_based(self):
# self.assertNotEqual(v.info['resolution'], 0)
# self.assertEqual(v.info['units'], 'degrees')
self.assertEqual(v.ndim, 1)
- self.assertEqual(v.attrs['sensor'], ['CrIS', 'ATMS', 'VIIRS'])
+ self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs']))
self.assertEqual(type(v.attrs['start_time']), datetime.datetime)
self.assertEqual(type(v.attrs['end_time']), datetime.datetime)
@@ -249,6 +250,22 @@ def test_load_pressure_based(self):
for v in datasets.values():
# self.assertNotEqual(v.info['resolution'], 0)
self.assertEqual(v.ndim, 2)
+ if np.issubdtype(v.dtype, np.floating):
+ assert '_FillValue' not in v.attrs
+
+ def test_load_multiple_files_pressure(self):
+ """Test loading Temperature from multiple input files."""
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ 'NUCAPS-EDR_v1r0_npp_s201603011158009_e201603011158307_c201603011222270.nc',
+ 'NUCAPS-EDR_v1r0_npp_s201603011159009_e201603011159307_c201603011222270.nc',
+ ])
+ r.create_filehandlers(loadables)
+ datasets = r.load(r.pressure_dataset_names['Temperature'], pressure_levels=True)
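+        # With pressure_levels=True, each of the 100 pressure levels is loaded as a separate 1-D dataset.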
+ self.assertEqual(len(datasets), 100)
+ for v in datasets.values():
+ self.assertEqual(v.ndim, 1)
def test_load_individual_pressure_levels_true(self):
"""Test loading Temperature with individual pressure datasets."""
@@ -399,7 +416,7 @@ def test_load_nonpressure_based(self):
self.assertEqual(len(datasets), 5)
for v in datasets.values():
self.assertEqual(v.ndim, 1)
- self.assertEqual(v.attrs['sensor'], ['CrIS', 'ATMS', 'VIIRS'])
+ self.assertEqual(v.attrs['sensor'], set(['cris', 'atms', 'viirs']))
self.assertEqual(type(v.attrs['start_time']), datetime.datetime)
self.assertEqual(type(v.attrs['end_time']), datetime.datetime)
diff --git a/satpy/tests/reader_tests/test_nwcsaf_msg.py b/satpy/tests/reader_tests/test_nwcsaf_msg.py
index fed0715d1d..5e3053058e 100644
--- a/satpy/tests/reader_tests/test_nwcsaf_msg.py
+++ b/satpy/tests/reader_tests/test_nwcsaf_msg.py
@@ -16,13 +16,16 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittests for NWC SAF MSG (2013) reader."""
-import unittest
-import numpy as np
-import tempfile
import os
-import h5py
+import tempfile
+import unittest
from collections import OrderedDict
+import h5py
+import numpy as np
+
+from satpy.tests.reader_tests.utils import fill_h5
+
CTYPE_TEST_ARRAY = (np.random.rand(1856, 3712) * 255).astype(np.uint8)
CTYPE_TEST_FRAME = (np.arange(100).reshape(10, 10) / 100. * 20).astype(np.uint8)
CTYPE_TEST_ARRAY[1000:1010, 1000:1010] = CTYPE_TEST_FRAME
@@ -449,32 +452,19 @@ def setUp(self):
"SAFNWC_MSG3_CTTH_201611090800_MSG-N_______.PLAX.CTTH.0.h5",
)
- def fill_h5(root, stuff):
- for key, val in stuff.items():
- if key in ["value", "attrs"]:
- continue
- if "value" in val:
- root[key] = val["value"]
- else:
- grp = root.create_group(key)
- fill_h5(grp, stuff[key])
- if "attrs" in val:
- for attrs, val in val["attrs"].items():
- if isinstance(val, str) and val.startswith(
- ""
- ):
- root[key].attrs[attrs] = root[val[24:]].ref
- else:
- root[key].attrs[attrs] = val
+ def cut_h5_object_ref(root, attr):
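+            # Turn "<HDF5 object reference> /path" attribute strings into real h5py object references.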
+            if isinstance(attr, str) and attr.startswith("<HDF5 object reference>"):
+ return root[attr[24:]].ref
+ return attr
h5f = h5py.File(self.filename_ct, mode="w")
- fill_h5(h5f, fake_ct)
+ fill_h5(h5f, fake_ct, attr_processor=cut_h5_object_ref)
for attr, val in fake_ct["attrs"].items():
h5f.attrs[attr] = val
h5f.close()
h5f = h5py.File(self.filename_ctth, mode="w")
- fill_h5(h5f, fake_ctth)
+ fill_h5(h5f, fake_ctth, attr_processor=cut_h5_object_ref)
for attr, val in fake_ctth["attrs"].items():
h5f.attrs[attr] = val
h5f.close()
diff --git a/satpy/tests/reader_tests/test_nwcsaf_nc.py b/satpy/tests/reader_tests/test_nwcsaf_nc.py
index 7d4ba57a70..baa1090cad 100644
--- a/satpy/tests/reader_tests/test_nwcsaf_nc.py
+++ b/satpy/tests/reader_tests/test_nwcsaf_nc.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright (c) 2018, 2020 Satpy developers
+# Copyright (c) 2018-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -19,6 +19,10 @@
import unittest
from unittest import mock
+import numpy as np
+import pytest
+import xarray as xr
+
PROJ_KM = {'gdal_projection': '+proj=geos +a=6378.137000 +b=6356.752300 +lon_0=0.000000 +h=35785.863000',
'gdal_xgeo_up_left': -5569500.0,
'gdal_ygeo_up_left': 5437500.0,
@@ -35,55 +39,59 @@ class TestNcNWCSAF(unittest.TestCase):
"""Test the NcNWCSAF reader."""
@mock.patch('satpy.readers.nwcsaf_nc.unzip_file')
- @mock.patch('satpy.readers.nwcsaf_nc.xr')
- def setUp(self, xr_, unzip):
+ @mock.patch('satpy.readers.nwcsaf_nc.xr.open_dataset')
+ def setUp(self, xr_open_dataset, unzip):
"""Set up the test case."""
from satpy.readers.nwcsaf_nc import NcNWCSAF
- xr_.return_value = mock.Mock(attrs={})
+ xr_open_dataset.return_value = xr.Dataset({"nx": xr.DataArray(), "ny": xr.DataArray()},
+ attrs={"source": "bla",
+ "satellite_identifier": "blu"})
+ self.fake_dataset = xr_open_dataset.return_value
unzip.return_value = ''
- self.scn = NcNWCSAF('filename', {}, {})
+ self.filehandler_class = NcNWCSAF
+ self.fh = self.filehandler_class('filename', {}, {})
def test_sensor_name(self):
"""Test that the correct sensor name is being set."""
- self.scn.set_platform_and_sensor(platform_name='Metop-B')
- self.assertEqual(self.scn.sensor, set(['avhrr-3']))
- self.assertEqual(self.scn.sensor_names, set(['avhrr-3']))
+ self.fh.set_platform_and_sensor(platform_name='Metop-B')
+ self.assertEqual(self.fh.sensor, set(['avhrr-3']))
+ self.assertEqual(self.fh.sensor_names, set(['avhrr-3']))
- self.scn.set_platform_and_sensor(platform_name='NOAA-20')
- self.assertEqual(self.scn.sensor, set(['viirs']))
- self.assertEqual(self.scn.sensor_names, set(['viirs']))
+ self.fh.set_platform_and_sensor(platform_name='NOAA-20')
+ self.assertEqual(self.fh.sensor, set(['viirs']))
+ self.assertEqual(self.fh.sensor_names, set(['viirs']))
- self.scn.set_platform_and_sensor(platform_name='Himawari-8')
- self.assertEqual(self.scn.sensor, set(['ahi']))
- self.assertEqual(self.scn.sensor_names, set(['ahi']))
+ self.fh.set_platform_and_sensor(platform_name='Himawari-8')
+ self.assertEqual(self.fh.sensor, set(['ahi']))
+ self.assertEqual(self.fh.sensor_names, set(['ahi']))
- self.scn.set_platform_and_sensor(sat_id='GOES16')
- self.assertEqual(self.scn.sensor, set(['abi']))
- self.assertEqual(self.scn.sensor_names, set(['abi']))
+ self.fh.set_platform_and_sensor(sat_id='GOES16')
+ self.assertEqual(self.fh.sensor, set(['abi']))
+ self.assertEqual(self.fh.sensor_names, set(['abi']))
- self.scn.set_platform_and_sensor(platform_name='GOES-17')
- self.assertEqual(self.scn.sensor, set(['abi']))
- self.assertEqual(self.scn.sensor_names, set(['abi']))
+ self.fh.set_platform_and_sensor(platform_name='GOES-17')
+ self.assertEqual(self.fh.sensor, set(['abi']))
+ self.assertEqual(self.fh.sensor_names, set(['abi']))
- self.scn.set_platform_and_sensor(sat_id='MSG4')
- self.assertEqual(self.scn.sensor, set(['seviri']))
+ self.fh.set_platform_and_sensor(sat_id='MSG4')
+ self.assertEqual(self.fh.sensor, set(['seviri']))
- self.scn.set_platform_and_sensor(platform_name='Meteosat-11')
- self.assertEqual(self.scn.sensor, set(['seviri']))
- self.assertEqual(self.scn.sensor_names, set(['seviri']))
+ self.fh.set_platform_and_sensor(platform_name='Meteosat-11')
+ self.assertEqual(self.fh.sensor, set(['seviri']))
+ self.assertEqual(self.fh.sensor_names, set(['seviri']))
def test_get_area_def(self):
"""Test that get_area_def() returns proper area."""
dsid = {'name': 'foo'}
- self.scn.nc[dsid['name']].shape = (5, 10)
+ self.fh.nc[dsid['name']] = xr.DataArray(np.zeros((5, 10)))
# a, b and h in kilometers
- self.scn.nc.attrs = PROJ_KM
- _check_area_def(self.scn.get_area_def(dsid))
+ self.fh.nc.attrs = PROJ_KM
+ _check_area_def(self.fh.get_area_def(dsid))
# a, b and h in meters
- self.scn.nc.attrs = PROJ
- _check_area_def(self.scn.get_area_def(dsid))
+ self.fh.nc.attrs = PROJ
+ _check_area_def(self.fh.get_area_def(dsid))
def test_scale_dataset_attr_removal(self):
"""Test the scaling of the dataset and removal of obsolete attributes."""
@@ -93,7 +101,7 @@ def test_scale_dataset_attr_removal(self):
attrs = {'scale_factor': np.array(10),
'add_offset': np.array(20)}
var = xr.DataArray([1, 2, 3], attrs=attrs)
- var = self.scn.scale_dataset('dummy', var, 'dummy')
+ var = self.fh.scale_dataset(var, 'dummy')
np.testing.assert_allclose(var, [30, 40, 50])
self.assertNotIn('scale_factor', var.attrs)
self.assertNotIn('add_offset', var.attrs)
@@ -106,7 +114,7 @@ def test_scale_dataset_floating(self):
'add_offset': np.array(2.5),
'_FillValue': 1}
var = xr.DataArray([1, 2, 3], attrs=attrs)
- var = self.scn.scale_dataset('dummy', var, 'dummy')
+ var = self.fh.scale_dataset(var, 'dummy')
np.testing.assert_allclose(var, [np.nan, 5.5, 7])
self.assertNotIn('scale_factor', var.attrs)
self.assertNotIn('add_offset', var.attrs)
@@ -115,7 +123,7 @@ def test_scale_dataset_floating(self):
'add_offset': np.array(2.5),
'valid_min': 1.1}
var = xr.DataArray([1, 2, 3], attrs=attrs)
- var = self.scn.scale_dataset('dummy', var, 'dummy')
+ var = self.fh.scale_dataset(var, 'dummy')
np.testing.assert_allclose(var, [np.nan, 5.5, 7])
self.assertNotIn('scale_factor', var.attrs)
self.assertNotIn('add_offset', var.attrs)
@@ -124,7 +132,7 @@ def test_scale_dataset_floating(self):
'add_offset': np.array(2.5),
'valid_max': 2.1}
var = xr.DataArray([1, 2, 3], attrs=attrs)
- var = self.scn.scale_dataset('dummy', var, 'dummy')
+ var = self.fh.scale_dataset(var, 'dummy')
np.testing.assert_allclose(var, [4, 5.5, np.nan])
self.assertNotIn('scale_factor', var.attrs)
self.assertNotIn('add_offset', var.attrs)
@@ -133,7 +141,7 @@ def test_scale_dataset_floating(self):
'add_offset': np.array(2.5),
'valid_range': (1.1, 2.1)}
var = xr.DataArray([1, 2, 3], attrs=attrs)
- var = self.scn.scale_dataset('dummy', var, 'dummy')
+ var = self.fh.scale_dataset(var, 'dummy')
np.testing.assert_allclose(var, [np.nan, 5.5, np.nan])
self.assertNotIn('scale_factor', var.attrs)
self.assertNotIn('add_offset', var.attrs)
@@ -143,13 +151,170 @@ def test_scale_dataset_floating(self):
'add_offset': np.array(-2000.),
'valid_range': (0., 27000.)}
var = xr.DataArray([1, 2, 3], attrs=attrs)
- var = self.scn.scale_dataset('dummy', var, 'dummy')
+ var = self.fh.scale_dataset(var, 'dummy')
np.testing.assert_allclose(var, [-1999., -1998., -1997.])
self.assertNotIn('scale_factor', var.attrs)
self.assertNotIn('add_offset', var.attrs)
self.assertEqual(var.attrs['valid_range'][0], -2000.)
self.assertEqual(var.attrs['valid_range'][1], 25000.)
+ def test_get_dataset_scales_and_offsets(self):
+ """Test that get_dataset() returns scaled and offseted data."""
+ dsid = {'name': 'cpp_cot'}
+ scale = 4
+ offset = 8
+ the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+ "add_offset": np.array(offset, dtype=float)})
+ self.fh.nc[dsid['name']] = the_array
+
+ info = dict(name="cpp_cot",
+ file_type="nc_nwcsaf_cpp")
+
+ res = self.fh.get_dataset(dsid, info)
+ np.testing.assert_allclose(res, the_array * scale + offset)
+
+ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self):
+ """Test that get_dataset() returns scaled palette_meanings while another dataset as scaling source."""
+ dsid = {'name': 'cpp_cot'}
+ scale = 4
+ offset = 8
+ array = xr.DataArray(np.ones((5, 3)), attrs={"palette_meanings": "1 2 3 4",
+ "fill_value_color": (0, 0, 0)})
+ self.fh.nc[dsid['name']] = array
+
+ so_array = xr.DataArray(np.ones((10, 10)),
+ attrs={"scale_factor": np.array(scale, dtype=float),
+ "add_offset": np.array(offset, dtype=float)},
+ dims=["lines", "colors"])
+
+ info = dict(name="cpp_cot",
+ file_type="nc_nwcsaf_cpp",
+ scale_offset_dataset="scaleoffset")
+ self.fh.nc["scaleoffset"] = so_array
+
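+        # The four palette meanings plus the fill color row are re-indexed 0..4 and rescaled: arange(5) * 4 + 8.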
+ res = self.fh.get_dataset(dsid, info)
+ np.testing.assert_allclose(res.attrs["palette_meanings"], np.arange(5) * scale + offset)
+
+ def test_get_dataset_raises_when_dataset_missing(self):
+ """Test that get_dataset() raises an error when the requested dataset is missing."""
+ dsid = {'name': 'cpp_cot'}
+ info = dict(name="cpp_cot",
+ file_type="nc_nwcsaf_cpp")
+ with pytest.raises(KeyError):
+ self.fh.get_dataset(dsid, info)
+
+ def test_get_dataset_uses_file_key_if_present(self):
+ """Test that get_dataset() uses a file_key if present."""
+ dsid_cpp = {'name': 'cpp_cot'}
+ dsid_cmic = {'name': 'cmic_cot'}
+ scale = 4
+ offset = 8
+ the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+ "add_offset": np.array(offset, dtype=float)})
+ file_key = "cmic_cot"
+ self.fh.nc[file_key] = the_array
+
+ info_cpp = dict(name="cpp_cot",
+ file_key=file_key,
+ file_type="nc_nwcsaf_cpp")
+
+ res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp)
+
+ info_cmic = dict(name="cmic_cot",
+ file_type="nc_nwcsaf_cpp")
+
+ res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic)
+ np.testing.assert_allclose(res_cpp, res_cmic)
+
+ def test_get_dataset_can_handle_file_key_list(self):
+ """Test that get_dataset() can handle a list of file_keys."""
+ dsid_cpp = {'name': 'cpp_reff'}
+ dsid_cmic = {'name': 'cmic_cre'}
+ scale = 4
+ offset = 8
+ data_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+ "add_offset": np.array(offset, dtype=float)})
+
+ self.fh.nc["cpp_reff"] = data_array
+ self.fh.nc["cmic_cre"] = data_array
+ self.fh.file_key_prefix = 'cpp_'
+
+ info_cpp = dict(name="cmic_reff",
+ file_key=['reff', 'cre'],
+ file_type="nc_nwcsaf_cpp")
+
+ res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp)
+
+ info_cmic = dict(name="cmic_reff",
+ file_key=['reff', 'cre'],
+ file_type="nc_nwcsaf_cpp")
+
+ res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic)
+ np.testing.assert_allclose(res_cpp, res_cmic)
+
+
+class TestNcNWCSAFFileKeyPrefix(unittest.TestCase):
+ """Test the NcNWCSAF reader when using a file key prefix."""
+
+ @mock.patch('satpy.readers.nwcsaf_nc.unzip_file')
+ @mock.patch('satpy.readers.nwcsaf_nc.xr.open_dataset')
+ def setUp(self, xr_open_dataset, unzip):
+ """Set up the test case."""
+ from satpy.readers.nwcsaf_nc import NcNWCSAF
+ xr_open_dataset.return_value = xr.Dataset({"nx": xr.DataArray(), "ny": xr.DataArray()},
+ attrs={"source": "bla",
+ "satellite_identifier": "blu"})
+ self.fake_dataset = xr_open_dataset.return_value
+ unzip.return_value = ''
+ self.filehandler_class = NcNWCSAF
+ self.file_key_prefix = "cmic_"
+ self.fh = self.filehandler_class('filename', {}, {"file_key_prefix": self.file_key_prefix})
+
+ def test_get_dataset_uses_file_key_prefix(self):
+ """Test that get_dataset() uses a file_key_prefix."""
+ dsid_cpp = {'name': 'cpp_cot'}
+ dsid_cmic = {'name': 'cmic_cot'}
+ scale = 4
+ offset = 8
+ the_array = xr.DataArray(np.ones((5, 10)), attrs={"scale_factor": np.array(scale, dtype=float),
+ "add_offset": np.array(offset, dtype=float)})
+ file_key = "cot"
+ self.fh.nc[self.file_key_prefix + file_key] = the_array
+
+ info_cpp = dict(name="cpp_cot",
+ file_key=file_key,
+ file_type="nc_nwcsaf_cpp")
+
+ res_cpp = self.fh.get_dataset(dsid_cpp, info_cpp)
+
+ info_cmic = dict(name="cmic_cot",
+ file_type="nc_nwcsaf_cpp")
+
+ res_cmic = self.fh.get_dataset(dsid_cmic, info_cmic)
+ np.testing.assert_allclose(res_cpp, res_cmic)
+
+ def test_get_dataset_scales_and_offsets_palette_meanings_using_other_dataset(self):
+ """Test that get_dataset() returns scaled palette_meanings using another dataset as scaling source."""
+ dsid = {'name': 'cpp_cot_pal'}
+ scale = 4
+ offset = 8
+ array = xr.DataArray(np.ones((5, 3)), attrs={"palette_meanings": "1 2 3 4",
+ "fill_value_color": (0, 0, 0)})
+ self.fh.nc[dsid['name']] = array
+
+ so_array = xr.DataArray(np.ones((10, 10)),
+ attrs={"scale_factor": np.array(scale, dtype=float),
+ "add_offset": np.array(offset, dtype=float)},
+ dims=["lines", "colors"])
+
+ info = dict(name="cpp_cot_pal",
+ file_type="nc_nwcsaf_cpp",
+ scale_offset_dataset="scaleoffset")
+ self.fh.nc[self.file_key_prefix + "scaleoffset"] = so_array
+
+ res = self.fh.get_dataset(dsid, info)
+ np.testing.assert_allclose(res.attrs["palette_meanings"], np.arange(5) * scale + offset)
+
def _check_area_def(area_definition):
correct_h = float(PROJ['gdal_projection'].split('+h=')[-1])
diff --git a/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py
new file mode 100644
index 0000000000..bc4f310e76
--- /dev/null
+++ b/satpy/tests/reader_tests/test_oceancolorcci_l3_nc.py
@@ -0,0 +1,240 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of Satpy.
+#
+# Satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# Satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# Satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Module for testing the satpy.readers.oceancolorcci_l3_nc module."""
+
+import os
+from datetime import datetime
+
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+
+@pytest.fixture()
+def fake_dataset():
+ """Create a CLAAS-like test dataset."""
+ adg = xr.DataArray(
+ [[1.0, 0.47, 4.5, 1.2], [0.2, 0, 1.3, 1.3]],
+ dims=("y", "x")
+ )
+ atot = xr.DataArray(
+ [[0.001, 0.08, 23.4, 0.1], [2.1, 1.2, 4.7, 306.]],
+ dims=("y", "x")
+ )
+ kd = xr.DataArray(
+ [[0.8, 0.01, 5.34, 1.23], [0.4, 1.0, 3.2, 1.23]],
+ dims=("y", "x")
+ )
+ nobs = xr.DataArray(
+ [[5, 118, 5, 100], [0, 15, 0, 1]],
+ dims=("y", "x"),
+ attrs={'_FillValue': 0}
+ )
+ nobs_filt = xr.DataArray(
+ [[5, 118, 5, 100], [np.nan, 15, np.nan, 1]],
+ dims=("y", "x"),
+ attrs={'_FillValue': 0}
+ )
+ watcls = xr.DataArray(
+ [[12.2, 0.01, 6.754, 5.33], [12.5, 101.5, 103.5, 204.]],
+ dims=("y", "x")
+ )
+ attrs = {
+ "geospatial_lon_resolution": "90",
+ "geospatial_lat_resolution": "90",
+ "geospatial_lon_min": -180.,
+ "geospatial_lon_max": 180.,
+ "geospatial_lat_min": -90.,
+ "geospatial_lat_max": 90.,
+ "time_coverage_start": "202108010000Z",
+ "time_coverage_end": "202108312359Z",
+ }
+ return xr.Dataset(
+ {
+ "adg_490": adg,
+ "water_class10": watcls,
+ "SeaWiFS_nobs_sum": nobs,
+ "test_nobs": nobs_filt,
+ "kd_490": kd,
+ "atot_665": atot,
+ },
+ attrs=attrs
+ )
+
+
+ds_dict = {'adg_490': 'adg_490',
+ 'water_class10': 'water_class10',
+ 'seawifs_nobs_sum': 'test_nobs',
+ 'kd_490': 'kd_490',
+ 'atot_665': 'atot_665'}
+
+ds_list_all = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'kd_490', 'atot_665']
+ds_list_iop = ['adg_490', 'water_class10', 'seawifs_nobs_sum', 'atot_665']
+ds_list_kd = ['kd_490', 'water_class10', 'seawifs_nobs_sum']
+
+
+@pytest.fixture
+def fake_file_dict(fake_dataset, tmp_path):
+ """Write a fake dataset to file."""
+ fdict = {}
+ filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-10M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['bad_month'] = filename
+
+ filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-2D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['bad_day'] = filename
+
+ filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-1M_MONTHLY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['ocprod_1m'] = filename
+
+ filename = tmp_path / "ESACCI-OC-L3S-OC_PRODUCTS-MERGED-5D_DAILY_4km_GEO_PML_OCx_QAA-202112-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['ocprod_5d'] = filename
+
+ filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-8D_DAILY_4km_GEO_PML_RRS-20211117-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['iop_8d'] = filename
+
+ filename = tmp_path / "ESACCI-OC-L3S-IOP-MERGED-1D_DAILY_4km_GEO_PML_OCx-202112-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['iop_1d'] = filename
+
+ filename = tmp_path / "ESACCI-OC-L3S-K_490-MERGED-1D_DAILY_4km_GEO_PML_RRS-20210113-fv5.0.nc"
+ fake_dataset.to_netcdf(filename)
+ fdict['k490_1d'] = filename
+
+ yield fdict
+
+
+class TestOCCCIReader:
+ """Test the Ocean Color reader."""
+
+ def setup(self):
+ """Set up the reader tests."""
+ from satpy._config import config_search_paths
+
+ self.yaml_file = "oceancolorcci_l3_nc.yaml"
+ self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+
+    def _create_reader_for_resolutions(self, filenames):
+        from satpy.readers import load_reader
+        reader = load_reader(self.reader_configs)
+        files = reader.select_files_from_pathnames(filenames)
+        assert len(filenames) == len(files)
+ reader.create_filehandlers(files)
+ # Make sure we have some files
+ assert reader.file_handlers
+ return reader
+
+ @pytest.fixture
+ def area_exp(self):
+ """Get expected area definition."""
+ proj_dict = {'datum': 'WGS84', 'no_defs': 'None', 'proj': 'longlat', 'type': 'crs'}
+
+ return AreaDefinition(
+ area_id="gridded_occci",
+ description="Full globe gridded area",
+ proj_id="longlat",
+ projection=proj_dict,
+ area_extent=(-180., -90., 180., 90.),
+ width=4,
+ height=2,
+ )
+
+ def test_get_area_def(self, area_exp, fake_file_dict):
+ """Test area definition."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']])
+ res = reader.load([ds_list_all[0]])
+ area = res[ds_list_all[0]].attrs['area']
+
+ assert area.area_id == area_exp.area_id
+ assert area.area_extent == area_exp.area_extent
+ assert area.width == area_exp.width
+ assert area.height == area_exp.height
+ assert area.proj_dict == area_exp.proj_dict
+
+    def test_bad_fname(self, fake_file_dict):
+ """Test case where an incorrect composite period is given."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['bad_month']])
+ res = reader.load([ds_list_all[0]])
+ assert len(res) == 0
+ reader = self._create_reader_for_resolutions([fake_file_dict['bad_day']])
+ res = reader.load([ds_list_all[0]])
+ assert len(res) == 0
+
+ def test_get_dataset_monthly_allprods(self, fake_dataset, fake_file_dict):
+ """Test dataset loading."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_1m']])
+ # Check how many datasets are available. This file contains all of them.
+ assert len(list(reader.available_dataset_names)) == 94
+ res = reader.load(ds_list_all)
+ assert len(res) == len(ds_list_all)
+ for curds in ds_list_all:
+ np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
+ assert res[curds].attrs['sensor'] == 'merged'
+ assert res[curds].attrs['composite_period'] == 'monthly'
+
+ def test_get_dataset_8d_iopprods(self, fake_dataset, fake_file_dict):
+ """Test dataset loading."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']])
+ # Check how many datasets are available. This file contains all of them.
+ assert len(list(reader.available_dataset_names)) == 70
+ res = reader.load(ds_list_iop)
+ assert len(res) == len(ds_list_iop)
+ for curds in ds_list_iop:
+ np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
+ assert res[curds].attrs['sensor'] == 'merged'
+ assert res[curds].attrs['composite_period'] == '8-day'
+
+ def test_get_dataset_1d_kprods(self, fake_dataset, fake_file_dict):
+ """Test dataset loading."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']])
+ # Check how many datasets are available. This file contains all of them.
+ assert len(list(reader.available_dataset_names)) == 25
+ res = reader.load(ds_list_kd)
+ assert len(res) == len(ds_list_kd)
+ for curds in ds_list_kd:
+ np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
+ assert res[curds].attrs['sensor'] == 'merged'
+ assert res[curds].attrs['composite_period'] == 'daily'
+
+ def test_get_dataset_5d_allprods(self, fake_dataset, fake_file_dict):
+ """Test dataset loading."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['ocprod_5d']])
+ # Check how many datasets are available. This file contains all of them.
+ assert len(list(reader.available_dataset_names)) == 94
+ res = reader.load(ds_list_all)
+ assert len(res) == len(ds_list_all)
+ for curds in ds_list_all:
+ np.testing.assert_allclose(res[curds].values, fake_dataset[ds_dict[curds]].values)
+ assert res[curds].attrs['sensor'] == 'merged'
+ assert res[curds].attrs['composite_period'] == '5-day'
+
+ def test_start_time(self, fake_file_dict):
+ """Test start time property."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['k490_1d']])
+ assert reader.start_time == datetime(2021, 8, 1, 0, 0, 0)
+
+ def test_end_time(self, fake_file_dict):
+ """Test end time property."""
+ reader = self._create_reader_for_resolutions([fake_file_dict['iop_8d']])
+ assert reader.end_time == datetime(2021, 8, 31, 23, 59, 0)
diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py
index 380bf000c4..b0196eb3b8 100644
--- a/satpy/tests/reader_tests/test_olci_nc.py
+++ b/satpy/tests/reader_tests/test_olci_nc.py
@@ -26,11 +26,11 @@ class TestOLCIReader(unittest.TestCase):
@mock.patch('xarray.open_dataset')
def test_instantiate(self, mocked_dataset):
"""Test initialization of file handlers."""
- from satpy.readers.olci_nc import (NCOLCIBase, NCOLCICal, NCOLCIGeo,
- NCOLCIChannelBase, NCOLCI1B, NCOLCI2)
- from satpy.tests.utils import make_dataid
import xarray as xr
+ from satpy.readers.olci_nc import NCOLCI1B, NCOLCI2, NCOLCIBase, NCOLCICal, NCOLCIChannelBase, NCOLCIGeo
+ from satpy.tests.utils import make_dataid
+
cal_data = xr.Dataset(
{
'solar_flux': (('bands'), [0, 1, 2]),
@@ -95,10 +95,11 @@ def test_open_file_objects(self, mocked_open_dataset):
@mock.patch('xarray.open_dataset')
def test_get_dataset(self, mocked_dataset):
"""Test reading datasets."""
- from satpy.readers.olci_nc import NCOLCI2
- from satpy.tests.utils import make_dataid
import numpy as np
import xarray as xr
+
+ from satpy.readers.olci_nc import NCOLCI2
+ from satpy.tests.utils import make_dataid
mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'],
np.array([1 << x for x in range(30)]).reshape(5, 6))},
coords={'rows': np.arange(5),
@@ -112,10 +113,11 @@ def test_get_dataset(self, mocked_dataset):
@mock.patch('xarray.open_dataset')
def test_olci_angles(self, mocked_dataset):
"""Test reading datasets."""
- from satpy.readers.olci_nc import NCOLCIAngles
- from satpy.tests.utils import make_dataid
import numpy as np
import xarray as xr
+
+ from satpy.readers.olci_nc import NCOLCIAngles
+ from satpy.tests.utils import make_dataid
attr_dict = {
'ac_subsampling_factor': 1,
'al_subsampling_factor': 2,
@@ -144,10 +146,11 @@ def test_olci_angles(self, mocked_dataset):
@mock.patch('xarray.open_dataset')
def test_olci_meteo(self, mocked_dataset):
"""Test reading datasets."""
- from satpy.readers.olci_nc import NCOLCIMeteo
- from satpy.tests.utils import make_dataid
import numpy as np
import xarray as xr
+
+ from satpy.readers.olci_nc import NCOLCIMeteo
+ from satpy.tests.utils import make_dataid
attr_dict = {
'ac_subsampling_factor': 1,
'al_subsampling_factor': 2,
@@ -180,8 +183,10 @@ class TestBitFlags(unittest.TestCase):
def test_bitflags(self):
"""Test the BitFlags class."""
- import numpy as np
from functools import reduce
+
+ import numpy as np
+
from satpy.readers.olci_nc import BitFlags
flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE',
'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN',
diff --git a/satpy/tests/reader_tests/test_omps_edr.py b/satpy/tests/reader_tests/test_omps_edr.py
index 7b09ef885a..2c211013cc 100644
--- a/satpy/tests/reader_tests/test_omps_edr.py
+++ b/satpy/tests/reader_tests/test_omps_edr.py
@@ -20,7 +20,9 @@
import os
import unittest
from unittest import mock
+
import numpy as np
+
from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
@@ -169,7 +171,7 @@ class TestOMPSEDRReader(unittest.TestCase):
def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
from satpy._config import config_search_paths
- from satpy.readers.omps_edr import EDRFileHandler, EDREOSFileHandler
+ from satpy.readers.omps_edr import EDREOSFileHandler, EDRFileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(EDRFileHandler, '__bases__', (FakeHDF5FileHandler2,))
diff --git a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py
index d50c267fc5..937470724f 100644
--- a/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py
+++ b/satpy/tests/reader_tests/test_safe_sar_l2_ocn.py
@@ -18,8 +18,10 @@
"""Module for testing the satpy.readers.safe_sar_l2_ocn module."""
import unittest
import unittest.mock as mock
+
import numpy as np
import xarray as xr
+
from satpy.tests.utils import make_dataid
@@ -27,8 +29,6 @@ class TestSAFENC(unittest.TestCase):
"""Test various SAFE SAR L2 OCN file handlers."""
@mock.patch('satpy.readers.safe_sar_l2_ocn.xr')
- @mock.patch.multiple('satpy.readers.safe_sar_l2_ocn.SAFENC',
- __abstractmethods__=set())
def setUp(self, xr_):
"""Set up the tests."""
from satpy.readers.safe_sar_l2_ocn import SAFENC
diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py
index 093fed1741..5e49a1d491 100644
--- a/satpy/tests/reader_tests/test_sar_c_safe.py
+++ b/satpy/tests/reader_tests/test_sar_c_safe.py
@@ -16,17 +16,18 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.sar-c_safe module."""
-import os
-import tempfile
+
import unittest
import unittest.mock as mock
-from contextlib import suppress
+from enum import Enum
+from io import BytesIO
import dask.array as da
import numpy as np
import xarray as xr
from satpy.dataset import DataQuery
+from satpy.readers.sar_c_safe import SAFEXMLAnnotation, SAFEXMLCalibration, SAFEXMLNoise
class TestSAFEGRD(unittest.TestCase):
@@ -53,9 +54,9 @@ def setUp(self, mocked_rio_open):
def test_instantiate(self):
"""Test initialization of file handlers."""
- assert(self.test_fh._polarization == 'vv')
- assert(self.test_fh.calibration == self.calfh)
- assert(self.test_fh.noise == self.noisefh)
+ assert self.test_fh._polarization == 'vv'
+ assert self.test_fh.calibration == self.calfh
+ assert self.test_fh.noise == self.noisefh
self.mocked_rio_open.assert_called()
@mock.patch('rioxarray.open_rasterio')
@@ -272,6 +273,54 @@ def __init__(self, *args):
+  <geolocationGrid>
+    <geolocationGridPointList count="4">
+      <geolocationGridPoint>
+        <azimuthTime>2018-02-12T03:24:58.493342</azimuthTime>
+        <slantRangeTime>4.964462411376810e-03</slantRangeTime>
+        <line>0</line>
+        <pixel>0</pixel>
+        <latitude>7.021017981690355e+01</latitude>
+        <longitude>5.609684402205929e+01</longitude>
+        <height>8.234046399593353e-04</height>
+        <incidenceAngle>1.918318045731997e+01</incidenceAngle>
+        <elevationAngle>1.720012646010728e+01</elevationAngle>
+      </geolocationGridPoint>
+      <geolocationGridPoint>
+        <azimuthTime>2018-02-12T03:24:58.493342</azimuthTime>
+        <slantRangeTime>4.964462411376810e-03</slantRangeTime>
+        <line>0</line>
+        <pixel>9</pixel>
+        <latitude>7.021017981690355e+01</latitude>
+        <longitude>5.609684402205929e+01</longitude>
+        <height>8.234046399593353e-04</height>
+        <incidenceAngle>1.918318045731997e+01</incidenceAngle>
+        <elevationAngle>1.720012646010728e+01</elevationAngle>
+      </geolocationGridPoint>
+      <geolocationGridPoint>
+        <azimuthTime>2018-02-12T03:24:58.493342</azimuthTime>
+        <slantRangeTime>4.964462411376810e-03</slantRangeTime>
+        <line>9</line>
+        <pixel>0</pixel>
+        <latitude>7.021017981690355e+01</latitude>
+        <longitude>5.609684402205929e+01</longitude>
+        <height>8.234046399593353e-04</height>
+        <incidenceAngle>1.918318045731997e+01</incidenceAngle>
+        <elevationAngle>1.720012646010728e+01</elevationAngle>
+      </geolocationGridPoint>
+      <geolocationGridPoint>
+        <azimuthTime>2018-02-12T03:24:58.493342</azimuthTime>
+        <slantRangeTime>4.964462411376810e-03</slantRangeTime>
+        <line>9</line>
+        <pixel>9</pixel>
+        <latitude>7.021017981690355e+01</latitude>
+        <longitude>5.609684402205929e+01</longitude>
+        <height>8.234046399593353e-04</height>
+        <incidenceAngle>1.918318045731997e+01</incidenceAngle>
+        <elevationAngle>1.720012646010728e+01</elevationAngle>
+      </geolocationGridPoint>
+    </geolocationGridPointList>
+  </geolocationGrid>
"""
@@ -374,63 +423,355 @@ def __init__(self, *args):
"""
+noise_xml_with_holes = b"""<?xml version="1.0" encoding="UTF-8"?>
+<noise>
+  <noiseRangeVectorList count="3">
+    <noiseRangeVector>
+      <azimuthTime>2020-03-15T05:04:28.137817</azimuthTime>
+      <line>0</line>
+      <pixel count="6">0 2 4 6 8 9</pixel>
+      <noiseRangeLut count="6">0.00000e+00 2.00000e+00 4.00000e+00 6.00000e+00 8.00000e+00 9.00000e+00</noiseRangeLut>
+    </noiseRangeVector>
+    <noiseRangeVector>
+      <azimuthTime>2020-03-15T05:04:28.137817</azimuthTime>
+      <line>5</line>
+      <pixel count="6">0 2 4 7 8 9</pixel>
+      <noiseRangeLut count="6">0.00000e+00 2.00000e+00 4.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00</noiseRangeLut>
+    </noiseRangeVector>
+    <noiseRangeVector>
+      <azimuthTime>2020-03-15T05:04:28.137817</azimuthTime>
+      <line>9</line>
+      <pixel count="6">0 2 5 7 8 9</pixel>
+      <noiseRangeLut count="6">0.00000e+00 2.00000e+00 5.00000e+00 7.00000e+00 8.00000e+00 9.00000e+00</noiseRangeLut>
+    </noiseRangeVector>
+  </noiseRangeVectorList>
+  <noiseAzimuthVectorList count="12">
+    <noiseAzimuthVector>
+      <swath>IW1</swath>
+      <firstAzimuthLine>0</firstAzimuthLine>
+      <firstRangeSample>3</firstRangeSample>
+      <lastAzimuthLine>2</lastAzimuthLine>
+      <lastRangeSample>5</lastRangeSample>
+      <line count="1">0</line>
+      <noiseAzimuthLut count="1">1.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW1</swath>
+      <firstAzimuthLine>1</firstAzimuthLine>
+      <firstRangeSample>0</firstRangeSample>
+      <lastAzimuthLine>5</lastAzimuthLine>
+      <lastRangeSample>1</lastRangeSample>
+      <line count="3">2 4 5</line>
+      <noiseAzimuthLut count="3">2.000000e+00 2.000000e+00 2.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>2</firstAzimuthLine>
+      <firstRangeSample>8</firstRangeSample>
+      <lastAzimuthLine>4</lastAzimuthLine>
+      <lastRangeSample>9</lastRangeSample>
+      <line count="2">2 4</line>
+      <noiseAzimuthLut count="2">3.000000e+00 3.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>3</firstAzimuthLine>
+      <firstRangeSample>2</firstRangeSample>
+      <lastAzimuthLine>5</lastAzimuthLine>
+      <lastRangeSample>3</lastRangeSample>
+      <line count="2">3 5</line>
+      <noiseAzimuthLut count="2">4.000000e+00 4.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>3</firstAzimuthLine>
+      <firstRangeSample>4</firstRangeSample>
+      <lastAzimuthLine>4</lastAzimuthLine>
+      <lastRangeSample>5</lastRangeSample>
+      <line count="2">3 4</line>
+      <noiseAzimuthLut count="2">5.000000e+00 5.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>4</firstAzimuthLine>
+      <firstRangeSample>6</firstRangeSample>
+      <lastAzimuthLine>4</lastAzimuthLine>
+      <lastRangeSample>7</lastRangeSample>
+      <line count="1">4</line>
+      <noiseAzimuthLut count="1">6.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>5</firstAzimuthLine>
+      <firstRangeSample>4</firstRangeSample>
+      <lastAzimuthLine>7</lastAzimuthLine>
+      <lastRangeSample>6</lastRangeSample>
+      <line count="2">5 7</line>
+      <noiseAzimuthLut count="2">7.000000e+00 7.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>5</firstAzimuthLine>
+      <firstRangeSample>7</firstRangeSample>
+      <lastAzimuthLine>7</lastAzimuthLine>
+      <lastRangeSample>9</lastRangeSample>
+      <line count="1">6</line>
+      <noiseAzimuthLut count="1">8.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>6</firstAzimuthLine>
+      <firstRangeSample>0</firstRangeSample>
+      <lastAzimuthLine>7</lastAzimuthLine>
+      <lastRangeSample>3</lastRangeSample>
+      <line count="2">6 7</line>
+      <noiseAzimuthLut count="2">9.000000e+00 9.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>8</firstAzimuthLine>
+      <firstRangeSample>0</firstRangeSample>
+      <lastAzimuthLine>9</lastAzimuthLine>
+      <lastRangeSample>0</lastRangeSample>
+      <line count="1">8</line>
+      <noiseAzimuthLut count="1">10.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW2</swath>
+      <firstAzimuthLine>8</firstAzimuthLine>
+      <firstRangeSample>2</firstRangeSample>
+      <lastAzimuthLine>9</lastAzimuthLine>
+      <lastRangeSample>3</lastRangeSample>
+      <line count="2">8 9</line>
+      <noiseAzimuthLut count="2">11.000000e+00 11.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+    <noiseAzimuthVector>
+      <swath>IW3</swath>
+      <firstAzimuthLine>8</firstAzimuthLine>
+      <firstRangeSample>4</firstRangeSample>
+      <lastAzimuthLine>8</lastAzimuthLine>
+      <lastRangeSample>5</lastRangeSample>
+      <line count="1">8</line>
+      <noiseAzimuthLut count="1">12.000000e+00</noiseAzimuthLut>
+    </noiseAzimuthVector>
+  </noiseAzimuthVectorList>
+</noise>
+"""
+
+
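+# Calibration annotation with four identical calibration vectors; the tests
+# expect the sigma/beta/gamma/dn LUTs to be interpolated onto the full 10x10 grid.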
+calibration_xml = b"""<?xml version="1.0" encoding="UTF-8"?>
+<calibration>
+  <adsHeader>
+    <missionId>S1A</missionId>
+    <productType>GRD</productType>
+    <polarisation>VV</polarisation>
+    <mode>IW</mode>
+    <swath>IW</swath>
+    <startTime>2018-02-12T03:24:58.493726</startTime>
+    <stopTime>2018-02-12T03:25:01.493726</stopTime>
+    <absoluteOrbitNumber>20568</absoluteOrbitNumber>
+    <missionDataTakeId>144162</missionDataTakeId>
+    <imageNumber>001</imageNumber>
+  </adsHeader>
+  <calibrationInformation>
+    <absoluteCalibrationConstant>1.000000e+00</absoluteCalibrationConstant>
+  </calibrationInformation>
+  <calibrationVectorList count="4">
+    <calibrationVector>
+      <azimuthTime>2018-02-12T03:24:58.493726</azimuthTime>
+      <line>0</line>
+      <pixel count="6">0 2 4 6 8 9</pixel>
+      <sigmaNought count="6">1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03</sigmaNought>
+      <betaNought count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</betaNought>
+      <gamma count="6">1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03</gamma>
+      <dn count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</dn>
+    </calibrationVector>
+    <calibrationVector>
+      <azimuthTime>2018-02-12T03:24:59.493726</azimuthTime>
+      <line>3</line>
+      <pixel count="6">0 2 4 6 8 9</pixel>
+      <sigmaNought count="6">1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03</sigmaNought>
+      <betaNought count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</betaNought>
+      <gamma count="6">1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03</gamma>
+      <dn count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</dn>
+    </calibrationVector>
+    <calibrationVector>
+      <azimuthTime>2018-02-12T03:25:00.493726</azimuthTime>
+      <line>6</line>
+      <pixel count="6">0 2 4 6 8 9</pixel>
+      <sigmaNought count="6">1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03</sigmaNought>
+      <betaNought count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</betaNought>
+      <gamma count="6">1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03</gamma>
+      <dn count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</dn>
+    </calibrationVector>
+    <calibrationVector>
+      <azimuthTime>2018-02-12T03:25:01.493726</azimuthTime>
+      <line>9</line>
+      <pixel count="6">0 2 4 6 8 9</pixel>
+      <sigmaNought count="6">1.894274e+03 1.788593e+03 1.320240e+03 1.277968e+03 1.277968e+03 1.277968e+03</sigmaNought>
+      <betaNought count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</betaNought>
+      <gamma count="6">1.840695e+03 1.718649e+03 1.187203e+03 1.185249e+03 1.183303e+03 1.181365e+03</gamma>
+      <dn count="6">1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03 1.0870e+03</dn>
+    </calibrationVector>
+  </calibrationVectorList>
+</calibration>
+"""
+
class TestSAFEXMLNoise(unittest.TestCase):
"""Test the SAFE XML Noise file handler."""
def setUp(self):
"""Set up the test case."""
- from satpy.readers.sar_c_safe import SAFEXML
-
- with tempfile.NamedTemporaryFile(delete=False) as ntf:
- self.annotation_filename = ntf.name
- ntf.write(annotation_xml)
- ntf.close()
- self.annotation_fh = SAFEXML(self.annotation_filename, mock.MagicMock(), mock.MagicMock())
-
- with tempfile.NamedTemporaryFile(delete=False) as ntf:
- self.noise_filename = ntf.name
- ntf.write(noise_xml)
- ntf.close()
- self.noise_fh = SAFEXML(self.noise_filename, mock.MagicMock(), mock.MagicMock(), self.annotation_fh)
-
- def tearDown(self):
- """Tear down the test case."""
- with suppress(PermissionError):
- os.remove(self.annotation_filename)
- with suppress(PermissionError):
- os.remove(self.noise_filename)
+ filename_info = dict(start_time=None, end_time=None, polarization="vv")
+ self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock())
+ self.noise_fh = SAFEXMLNoise(BytesIO(noise_xml), filename_info, mock.MagicMock(), self.annotation_fh)
+
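+        # Expected arrays worked out by hand from annotation_xml and noise_xml:
+        # each azimuth-noise block is spread over its line/sample extent, and the
+        # range LUT is interpolated linearly over the ten pixels.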
+ self.expected_azimuth_noise = np.array([[np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
+ [np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
+ [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan],
+ [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan],
+ [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan],
+ [2, 2, 5, 5, 5, 5, 6, 6, 6, 6],
+ [2, 2, 5, 5, 5, 5, 6, 6, 6, 6],
+ [2, 2, 5, 5, 5, 5, 6, 6, 6, 6],
+ [2, 2, 7, 7, 7, 7, 7, 8, 8, 8],
+ [2, 2, 7, 7, 7, 7, 7, 8, 8, 8],
+ ])
+
+ self.expected_range_noise = np.array([[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
+ ])
+
+ self.noise_fh_with_holes = SAFEXMLNoise(BytesIO(noise_xml_with_holes), filename_info, mock.MagicMock(),
+ self.annotation_fh)
+ self.expected_azimuth_noise_with_holes = np.array(
+ [[np.nan, np.nan, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan],
+ [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan],
+ [2, 2, np.nan, 1, 1, 1, np.nan, np.nan, 3, 3],
+ [2, 2, 4, 4, 5, 5, np.nan, np.nan, 3, 3],
+ [2, 2, 4, 4, 5, 5, 6, 6, 3, 3],
+ [2, 2, 4, 4, 7, 7, 7, 8, 8, 8],
+ [9, 9, 9, 9, 7, 7, 7, 8, 8, 8],
+ [9, 9, 9, 9, 7, 7, 7, 8, 8, 8],
+ [10, np.nan, 11, 11, 12, 12, np.nan, np.nan, np.nan, np.nan],
+ [10, np.nan, 11, 11, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]
+ ])
def test_azimuth_noise_array(self):
"""Test reading the azimuth-noise array."""
- expected_data = np.array([[np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
- [np.nan, 1, 1, 1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
- [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan],
- [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan],
- [2, 2, 3, 3, 3, 4, 4, 4, 4, np.nan],
- [2, 2, 5, 5, 5, 5, 6, 6, 6, 6],
- [2, 2, 5, 5, 5, 5, 6, 6, 6, 6],
- [2, 2, 5, 5, 5, 5, 6, 6, 6, 6],
- [2, 2, 7, 7, 7, 7, 7, 8, 8, 8],
- [2, 2, 7, 7, 7, 7, 7, 8, 8, 8],
- ])
-
res = self.noise_fh.azimuth_noise_reader.read_azimuth_noise_array()
- np.testing.assert_array_equal(res, expected_data)
+ np.testing.assert_array_equal(res, self.expected_azimuth_noise)
+
+ def test_azimuth_noise_array_with_holes(self):
+ """Test reading the azimuth-noise array."""
+ res = self.noise_fh_with_holes.azimuth_noise_reader.read_azimuth_noise_array()
+ np.testing.assert_array_equal(res, self.expected_azimuth_noise_with_holes)
def test_range_noise_array(self):
"""Test reading the range-noise array."""
- expected_data = np.array([[0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
- ])
-
res = self.noise_fh.read_range_noise_array(chunks=5)
- np.testing.assert_allclose(res, expected_data)
+ np.testing.assert_allclose(res, self.expected_range_noise)
+
+ def test_get_noise_dataset(self):
+ """Test using get_dataset for the noise."""
+ query = DataQuery(name="noise", polarization="vv")
+ res = self.noise_fh.get_dataset(query, {})
+ np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise)
+
+ def test_get_noise_dataset_has_right_chunk_size(self):
+ """Test using get_dataset for the noise has right chunk size in result."""
+ query = DataQuery(name="noise", polarization="vv")
+ res = self.noise_fh.get_dataset(query, {}, chunks=3)
+ assert res.data.chunksize == (3, 3)
+
+
+class Calibration(Enum):
+ """Calibration levels."""
+
+ gamma = 1
+ sigma_nought = 2
+ beta_nought = 3
+ dn = 4
+
+
+class TestSAFEXMLCalibration(unittest.TestCase):
+ """Test the SAFE XML Calibration file handler."""
+
+ def setUp(self):
+ """Set up the test case."""
+ filename_info = dict(start_time=None, end_time=None, polarization="vv")
+ self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock())
+ self.calibration_fh = SAFEXMLCalibration(BytesIO(calibration_xml),
+ filename_info,
+ mock.MagicMock(),
+ self.annotation_fh)
+
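+        # The gamma LUT from calibration_xml, linearly interpolated to ten pixels
+        # and repeated for every line (all four calibration vectors are identical).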
+ self.expected_gamma = np.array([[1840.695, 1779.672, 1718.649, 1452.926, 1187.203, 1186.226,
+ 1185.249, 1184.276, 1183.303, 1181.365]]) * np.ones((10, 1))
+
+ def test_dn_calibration_array(self):
+ """Test reading the dn calibration array."""
+ expected_dn = np.ones((10, 10)) * 1087
+ res = self.calibration_fh.get_calibration(Calibration.dn, chunks=5)
+ np.testing.assert_allclose(res, expected_dn)
+
+ def test_beta_calibration_array(self):
+ """Test reading the beta calibration array."""
+ expected_beta = np.ones((10, 10)) * 1087
+ res = self.calibration_fh.get_calibration(Calibration.beta_nought, chunks=5)
+ np.testing.assert_allclose(res, expected_beta)
+
+ def test_sigma_calibration_array(self):
+ """Test reading the sigma calibration array."""
+ expected_sigma = np.array([[1894.274, 1841.4335, 1788.593, 1554.4165, 1320.24, 1299.104,
+ 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1))
+ res = self.calibration_fh.get_calibration(Calibration.sigma_nought, chunks=5)
+ np.testing.assert_allclose(res, expected_sigma)
+
+ def test_gamma_calibration_array(self):
+ """Test reading the gamma calibration array."""
+ res = self.calibration_fh.get_calibration(Calibration.gamma, chunks=5)
+ np.testing.assert_allclose(res, self.expected_gamma)
+
+ def test_get_calibration_dataset(self):
+ """Test using get_dataset for the calibration."""
+ query = DataQuery(name="gamma", polarization="vv")
+ res = self.calibration_fh.get_dataset(query, {})
+ np.testing.assert_allclose(res, self.expected_gamma)
+
+ def test_get_calibration_dataset_has_right_chunk_size(self):
+ """Test using get_dataset for the calibration yields array with right chunksize."""
+ query = DataQuery(name="gamma", polarization="vv")
+ res = self.calibration_fh.get_dataset(query, {}, chunks=3)
+ assert res.data.chunksize == (3, 3)
+ np.testing.assert_allclose(res, self.expected_gamma)
+
+ def test_get_calibration_constant(self):
+ """Test getting the calibration constant."""
+ query = DataQuery(name="calibration_constant", polarization="vv")
+ res = self.calibration_fh.get_dataset(query, {})
+ assert res == 1
+
+
+class TestSAFEXMLAnnotation(unittest.TestCase):
+ """Test the SAFE XML Annotation file handler."""
+
+ def setUp(self):
+ """Set up the test case."""
+ filename_info = dict(start_time=None, end_time=None, polarization="vv")
+ self.annotation_fh = SAFEXMLAnnotation(BytesIO(annotation_xml), filename_info, mock.MagicMock())
+
+ def test_incidence_angle(self):
+ """Test reading the incidence angle."""
+ query = DataQuery(name="incidence_angle", polarization="vv")
+ res = self.annotation_fh.get_dataset(query, {})
+ np.testing.assert_allclose(res, 19.18318046)
diff --git a/satpy/tests/reader_tests/test_satpy_cf_nc.py b/satpy/tests/reader_tests/test_satpy_cf_nc.py
index f6d3784db2..f01d9d1ed0 100644
--- a/satpy/tests/reader_tests/test_satpy_cf_nc.py
+++ b/satpy/tests/reader_tests/test_satpy_cf_nc.py
@@ -17,329 +17,316 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the CF reader."""
-import os
-import unittest
-from contextlib import suppress
from datetime import datetime
import numpy as np
+import pytest
import xarray as xr
+from pyresample import AreaDefinition, SwathDefinition
from satpy import Scene
from satpy.dataset.dataid import WavelengthRange
from satpy.readers.satpy_cf_nc import SatpyCFFileHandler
-class TestCFReader(unittest.TestCase):
+@pytest.fixture(scope="session")
+def _cf_scene():
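+    """Create a fake scene for the CF writer round-trip tests."""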
+ tstart = datetime(2019, 4, 1, 12, 0)
+ tend = datetime(2019, 4, 1, 12, 15)
+ data_visir = np.array([[1, 2], [3, 4]])
+ z_visir = [1, 2, 3, 4, 5, 6, 7]
+ qual_data = [[1, 2, 3, 4, 5, 6, 7],
+ [1, 2, 3, 4, 5, 6, 7]]
+ time_vis006 = [1, 2]
+ lat = 33.0 * np.array([[1, 2], [3, 4]])
+ lon = -13.0 * np.array([[1, 2], [3, 4]])
+
+ proj_dict = {
+ 'a': 6378169.0, 'b': 6356583.8, 'h': 35785831.0,
+ 'lon_0': 0.0, 'proj': 'geos', 'units': 'm'
+ }
+    y_size, x_size = data_visir.shape
+ area_extent = (339045.5577, 4365586.6063, 1068143.527, 4803645.4685)
+ area = AreaDefinition(
+ 'test',
+ 'test',
+ 'test',
+ proj_dict,
+ x_size,
+ y_size,
+ area_extent,
+ )
+
+ x, y = area.get_proj_coords()
+ y_visir = y[:, 0]
+ x_visir = x[0, :]
+
+ common_attrs = {
+ 'start_time': tstart,
+ 'end_time': tend,
+ 'platform_name': 'tirosn',
+ 'orbit_number': 99999,
+ 'area': area
+ }
+
+ vis006 = xr.DataArray(data_visir,
+ dims=('y', 'x'),
+ coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)},
+ attrs={
+ 'name': 'image0', 'id_tag': 'ch_r06',
+ 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance',
+ 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'),
+ 'orbital_parameters': {
+ 'projection_longitude': 1,
+ 'projection_latitude': 1,
+ 'projection_altitude': 1,
+ 'satellite_nominal_longitude': 1,
+ 'satellite_nominal_latitude': 1,
+ 'satellite_actual_longitude': 1,
+ 'satellite_actual_latitude': 1,
+ 'satellite_actual_altitude': 1,
+ 'nadir_longitude': 1,
+ 'nadir_latitude': 1,
+ 'only_in_1': False
+ }
+ })
+
+ ir_108 = xr.DataArray(data_visir,
+ dims=('y', 'x'),
+ coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)},
+ attrs={'name': 'image1', 'id_tag': 'ch_tb11', 'coordinates': 'lat lon'})
+ qual_f = xr.DataArray(qual_data,
+ dims=('y', 'z'),
+ coords={'y': y_visir, 'z': z_visir, 'acq_time': ('y', time_vis006)},
+ attrs={
+ 'name': 'qual_flags',
+ 'id_tag': 'qual_flags'
+ })
+ lat = xr.DataArray(lat,
+ dims=('y', 'x'),
+ coords={'y': y_visir, 'x': x_visir},
+ attrs={
+ 'name': 'lat',
+ 'standard_name': 'latitude',
+ 'modifiers': np.array([])
+ })
+ lon = xr.DataArray(lon,
+ dims=('y', 'x'),
+ coords={'y': y_visir, 'x': x_visir},
+ attrs={
+ 'name': 'lon',
+ 'standard_name': 'longitude',
+ 'modifiers': np.array([])
+ })
+
+ # for prefix testing
+ prefix_data = xr.DataArray(data_visir,
+ dims=('y', 'x'),
+ coords={'y': y_visir, 'x': x_visir},
+ attrs={
+ 'name': '1', 'id_tag': 'ch_r06',
+ 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance',
+ 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'),
+ 'area': area
+ })
+
+ # for swath testing
+ area = SwathDefinition(lons=lon, lats=lat)
+ swath_data = prefix_data.copy()
+ swath_data.attrs.update({'name': 'swath_data', 'area': area})
+
+ scene = Scene()
+ scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3']
+ scene_dict = {
+ 'image0': vis006,
+ 'image1': ir_108,
+ 'swath_data': swath_data,
+ '1': prefix_data,
+ 'lat': lat,
+ 'lon': lon,
+ 'qual_flags': qual_f
+ }
+
+ for key in scene_dict:
+ scene[key] = scene_dict[key]
+ if key != 'swath_data':
+ scene[key].attrs.update(common_attrs)
+ return scene
+
+
+@pytest.fixture
+def _nc_filename(tmp_path):
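+    """Create a unique filename for the CF writer output."""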
+ now = datetime.utcnow()
+ filename = f'testingcfwriter{now:%Y%j%H%M%S}-viirs-mband-20201007075915-20201007080744.nc'
+ return str(tmp_path / filename)
+
+
+class TestCFReader:
"""Test case for CF reader."""
- def setUp(self):
- """Create a test scene."""
- tstart = datetime(2019, 4, 1, 12, 0)
- tend = datetime(2019, 4, 1, 12, 15)
- data_visir = [[1, 2], [3, 4]]
- y_visir = [1, 2]
- x_visir = [1, 2]
- z_visir = [1, 2, 3, 4, 5, 6, 7]
- qual_data = [[1, 2, 3, 4, 5, 6, 7],
- [1, 2, 3, 4, 5, 6, 7]]
- time_vis006 = [1, 2]
- lat = 33.0 * np.array([[1, 2], [3, 4]])
- lon = -13.0 * np.array([[1, 2], [3, 4]])
- common_attrs = {'start_time': tstart,
- 'end_time': tend,
- 'platform_name': 'tirosn',
- 'orbit_number': 99999}
- vis006 = xr.DataArray(data_visir,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)},
- attrs={'name': 'image0', 'id_tag': 'ch_r06',
- 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance',
- 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm'),
- 'orbital_parameters': {
- 'projection_longitude': 1,
- 'projection_latitude': 1,
- 'projection_altitude': 1,
- 'satellite_nominal_longitude': 1,
- 'satellite_nominal_latitude': 1,
- 'satellite_actual_longitude': 1,
- 'satellite_actual_latitude': 1,
- 'satellite_actual_altitude': 1,
- 'nadir_longitude': 1,
- 'nadir_latitude': 1,
- 'only_in_1': False}
- })
-
- ir_108 = xr.DataArray(data_visir,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir, 'acq_time': ('y', time_vis006)},
- attrs={'name': 'image1', 'id_tag': 'ch_tb11', 'coordinates': 'lat lon'})
- qual_f = xr.DataArray(qual_data,
- dims=('y', 'z'),
- coords={'y': y_visir, 'z': z_visir, 'acq_time': ('y', time_vis006)},
- attrs={'name': 'qual_flags',
- 'id_tag': 'qual_flags'})
- lat = xr.DataArray(lat,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir},
- attrs={'name': 'lat',
- 'standard_name': 'latitude',
- 'modifiers': np.array([])})
- lon = xr.DataArray(lon,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir},
- attrs={'name': 'lon',
- 'standard_name': 'longitude',
- 'modifiers': np.array([])})
- self.scene = Scene()
- self.scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3']
- scene_dict = {'image0': vis006,
- 'image1': ir_108,
- 'lat': lat,
- 'lon': lon,
- 'qual_flags': qual_f}
- for key in scene_dict:
- self.scene[key] = scene_dict[key]
- self.scene[key].attrs.update(common_attrs)
-
- def test_write_and_read(self):
- """Save a file with cf_writer and read the data again."""
- filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- self.scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
- engine='h5netcdf',
- flatten_attrs=True,
- pretty=True)
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['image0', 'image1', 'lat'])
- np.testing.assert_array_equal(scn_['image0'].data, self.scene['image0'].data)
- np.testing.assert_array_equal(scn_['lat'].data, self.scene['lat'].data) # lat loaded as dataset
- np.testing.assert_array_equal(scn_['image0'].coords['lon'], self.scene['lon'].data) # lon loded as coord
- assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange)
- finally:
- with suppress(PermissionError):
- os.remove(filename)
+ def test_write_and_read_with_area_definition(self, _cf_scene, _nc_filename):
+ """Save a dataset with an area definition to file with cf_writer and read the data again."""
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
+ engine='h5netcdf',
+ flatten_attrs=True,
+ pretty=True)
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['image0', 'image1', 'lat'])
+ np.testing.assert_array_equal(scn_['image0'].data, _cf_scene['image0'].data)
+ np.testing.assert_array_equal(scn_['lat'].data, _cf_scene['lat'].data) # lat loaded as dataset
+        np.testing.assert_array_equal(scn_['image0'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
+ assert isinstance(scn_['image0'].attrs['wavelength'], WavelengthRange)
+ expected_area = _cf_scene['image0'].attrs['area']
+ actual_area = scn_['image0'].attrs['area']
+ assert pytest.approx(expected_area.area_extent, 0.000001) == actual_area.area_extent
+ assert expected_area.proj_dict == actual_area.proj_dict
+ assert expected_area.shape == actual_area.shape
+ assert expected_area.area_id == actual_area.area_id
+ assert expected_area.description == actual_area.description
+
+ def test_write_and_read_with_swath_definition(self, _cf_scene, _nc_filename):
+ """Save a dataset with a swath definition to file with cf_writer and read the data again."""
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
+ engine='h5netcdf',
+ flatten_attrs=True,
+ pretty=True,
+ datasets=["swath_data"])
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['swath_data'])
+ expected_area = _cf_scene['swath_data'].attrs['area']
+ actual_area = scn_['swath_data'].attrs['area']
+ assert expected_area.shape == actual_area.shape
+ np.testing.assert_array_equal(expected_area.lons.data, actual_area.lons.data)
+ np.testing.assert_array_equal(expected_area.lats.data, actual_area.lats.data)
def test_fix_modifier_attr(self):
"""Check that fix modifier can handle empty list as modifier attribute."""
- self.reader = SatpyCFFileHandler('filename',
- {},
- {'filetype': 'info'})
+ reader = SatpyCFFileHandler('filename',
+ {},
+ {'filetype': 'info'})
ds_info = {'modifiers': []}
- self.reader.fix_modifier_attr(ds_info)
- self.assertEqual(ds_info['modifiers'], ())
-
- def _dataset_for_prefix_testing(self):
- data_visir = [[1, 2], [3, 4]]
- y_visir = [1, 2]
- x_visir = [1, 2]
- lat = 33.0 * np.array([[1, 2], [3, 4]])
- lon = -13.0 * np.array([[1, 2], [3, 4]])
- vis006 = xr.DataArray(data_visir,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir},
- attrs={'name': '1', 'id_tag': 'ch_r06',
- 'coordinates': 'lat lon', 'resolution': 1000, 'calibration': 'reflectance',
- 'wavelength': WavelengthRange(min=0.58, central=0.63, max=0.68, unit='µm')
- })
- lat = xr.DataArray(lat,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir},
- attrs={'name': 'lat',
- 'standard_name': 'latitude',
- 'modifiers': np.array([])})
- lon = xr.DataArray(lon,
- dims=('y', 'x'),
- coords={'y': y_visir, 'x': x_visir},
- attrs={'name': 'lon',
- 'standard_name': 'longitude',
- 'modifiers': np.array([])})
- scene = Scene()
- scene.attrs['sensor'] = ['avhrr-1', 'avhrr-2', 'avhrr-3']
- scene['1'] = vis006
- scene['lat'] = lat
- scene['lon'] = lon
-
- return scene
-
- def test_read_prefixed_channels(self):
+ reader.fix_modifier_attr(ds_info)
+ assert ds_info['modifiers'] == ()
+
+ def test_read_prefixed_channels(self, _cf_scene, _nc_filename):
"""Check channels starting with digit is prefixed and read back correctly."""
- scene = self._dataset_for_prefix_testing()
- # Testing with default prefixing
- filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
engine='netcdf4',
flatten_attrs=True,
pretty=True)
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['1'])
- np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loaded as coord
-
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename], reader_kwargs={})
- scn_.load(['1'])
- np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loaded as coord
-
- # Check that variables starting with a digit is written to filename variable prefixed
- with xr.open_dataset(filename) as ds_disk:
- np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, scene['1'].data)
- finally:
- with suppress(PermissionError):
- os.remove(filename)
-
- def test_read_prefixed_channels_include_orig_name(self):
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['1'])
+ np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data)
+ np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
+
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename], reader_kwargs={})
+ scn_.load(['1'])
+ np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data)
+ np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
+
+        # Check that variables starting with a digit are written to file with a prefixed variable name
+ with xr.open_dataset(_nc_filename) as ds_disk:
+ np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data)
+
+ def test_read_prefixed_channels_include_orig_name(self, _cf_scene, _nc_filename):
"""Check channels starting with digit and includeed orig name is prefixed and read back correctly."""
- scene = self._dataset_for_prefix_testing()
- # Testing with default prefixing
- filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
engine='netcdf4',
flatten_attrs=True,
pretty=True,
include_orig_name=True)
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['1'])
- np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loaded as coord
-
- self.assertEqual(scn_['1'].attrs['original_name'], '1')
-
- # Check that variables starting with a digit is written to filename variable prefixed
- with xr.open_dataset(filename) as ds_disk:
- np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, scene['1'].data)
- finally:
- with suppress(PermissionError):
- os.remove(filename)
-
- def test_read_prefixed_channels_by_user(self):
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['1'])
+ np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data)
+ np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
+
+ assert scn_['1'].attrs['original_name'] == '1'
+
+        # Check that variables starting with a digit are written to file with a prefixed variable name
+ with xr.open_dataset(_nc_filename) as ds_disk:
+ np.testing.assert_array_equal(ds_disk['CHANNEL_1'].data, _cf_scene['1'].data)
+
+ def test_read_prefixed_channels_by_user(self, _cf_scene, _nc_filename):
"""Check channels starting with digit is prefixed by user and read back correctly."""
- scene = self._dataset_for_prefix_testing()
- filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
engine='netcdf4',
flatten_attrs=True,
pretty=True,
numeric_name_prefix='USER')
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename], reader_kwargs={'numeric_name_prefix': 'USER'})
- scn_.load(['1'])
- np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loded as coord
-
- # Check that variables starting with a digit is written to filename variable prefixed
- with xr.open_dataset(filename) as ds_disk:
- np.testing.assert_array_equal(ds_disk['USER1'].data, scene['1'].data)
- finally:
- with suppress(PermissionError):
- os.remove(filename)
-
- def test_read_prefixed_channels_by_user2(self):
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename], reader_kwargs={'numeric_name_prefix': 'USER'})
+ scn_.load(['1'])
+ np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data)
+        np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
+
+        # Check that variables starting with a digit are written to file with a prefixed variable name
+ with xr.open_dataset(_nc_filename) as ds_disk:
+ np.testing.assert_array_equal(ds_disk['USER1'].data, _cf_scene['1'].data)
+
+ def test_read_prefixed_channels_by_user2(self, _cf_scene, _nc_filename):
"""Check channels starting with digit is prefixed by user when saving and read back correctly without prefix."""
- scene = self._dataset_for_prefix_testing()
- filename = 'testingcfwriter{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
engine='netcdf4',
flatten_attrs=True,
pretty=True,
include_orig_name=False,
numeric_name_prefix='USER')
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['USER1'])
- np.testing.assert_array_equal(scn_['USER1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['USER1'].coords['lon'], scene['lon'].data) # lon loded as coord
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['USER1'])
+ np.testing.assert_array_equal(scn_['USER1'].data, _cf_scene['1'].data)
+        np.testing.assert_array_equal(scn_['USER1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
- finally:
- with suppress(PermissionError):
- os.remove(filename)
-
- def test_read_prefixed_channels_by_user_include_prefix(self):
+ def test_read_prefixed_channels_by_user_include_prefix(self, _cf_scene, _nc_filename):
"""Check channels starting with digit is prefixed by user and include original name when saving."""
- scene = self._dataset_for_prefix_testing()
- filename = 'testingcfwriter2{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
engine='netcdf4',
flatten_attrs=True,
pretty=True,
include_orig_name=True,
numeric_name_prefix='USER')
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['1'])
- np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loded as coord
-
- finally:
- with suppress(PermissionError):
- os.remove(filename)
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['1'])
+ np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data)
+        np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
- def test_read_prefixed_channels_by_user_no_prefix(self):
+ def test_read_prefixed_channels_by_user_no_prefix(self, _cf_scene, _nc_filename):
"""Check channels starting with digit is not prefixed by user."""
- scene = self._dataset_for_prefix_testing()
- filename = 'testingcfwriter3{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'},
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename,
engine='netcdf4',
flatten_attrs=True,
pretty=True,
numeric_name_prefix='')
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['1'])
- np.testing.assert_array_equal(scn_['1'].data, scene['1'].data)
- np.testing.assert_array_equal(scn_['1'].coords['lon'], scene['lon'].data) # lon loded as coord
-
- finally:
- with suppress(PermissionError):
- os.remove(filename)
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['1'])
+ np.testing.assert_array_equal(scn_['1'].data, _cf_scene['1'].data)
+        np.testing.assert_array_equal(scn_['1'].coords['lon'], _cf_scene['lon'].data) # lon loaded as coord
- def test_orbital_parameters(self):
+ def test_orbital_parameters(self, _cf_scene, _nc_filename):
"""Test that the orbital parameters in attributes are handled correctly."""
- filename = 'testingcfwriter4{:s}-viirs-mband-20201007075915-20201007080744.nc'.format(
- datetime.utcnow().strftime('%Y%j%H%M%S'))
- try:
- self.scene.save_datasets(writer='cf',
- filename=filename,
- header_attrs={'instrument': 'avhrr'})
- scn_ = Scene(reader='satpy_cf_nc',
- filenames=[filename])
- scn_.load(['image0'])
- orig_attrs = self.scene['image0'].attrs['orbital_parameters']
- new_attrs = scn_['image0'].attrs['orbital_parameters']
- assert isinstance(new_attrs, dict)
- for key in orig_attrs:
- assert orig_attrs[key] == new_attrs[key]
- finally:
- with suppress(PermissionError):
- os.remove(filename)
+ _cf_scene.save_datasets(writer='cf',
+ filename=_nc_filename)
+ scn_ = Scene(reader='satpy_cf_nc',
+ filenames=[_nc_filename])
+ scn_.load(['image0'])
+ orig_attrs = _cf_scene['image0'].attrs['orbital_parameters']
+ new_attrs = scn_['image0'].attrs['orbital_parameters']
+ assert isinstance(new_attrs, dict)
+ for key in orig_attrs:
+ assert orig_attrs[key] == new_attrs[key]
diff --git a/satpy/tests/reader_tests/test_scmi.py b/satpy/tests/reader_tests/test_scmi.py
index 684e15248b..6d42720c8d 100644
--- a/satpy/tests/reader_tests/test_scmi.py
+++ b/satpy/tests/reader_tests/test_scmi.py
@@ -19,6 +19,7 @@
import unittest
from unittest import mock
+
import numpy as np
import xarray as xr
@@ -102,6 +103,7 @@ def setUp(self, xr_):
def test_basic_attributes(self):
"""Test getting basic file attributes."""
from datetime import datetime
+
from satpy.tests.utils import make_dataid
self.assertEqual(self.reader.start_time,
datetime(2017, 7, 29, 12, 0, 0, 0))
@@ -121,6 +123,11 @@ def test_data_load(self):
self.assertNotIn('_FillValue', res.attrs)
self.assertEqual(res.attrs['standard_name'],
'toa_bidirectional_reflectance')
+ assert 'orbital_parameters' in res.attrs
+ orb_params = res.attrs['orbital_parameters']
+ assert orb_params['projection_longitude'] == -90.0
+ assert orb_params['projection_latitude'] == 0.0
+ assert orb_params['projection_altitude'] == 35785831.0
class TestSCMIFileHandlerArea(unittest.TestCase):
diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py
new file mode 100644
index 0000000000..2cc10a4344
--- /dev/null
+++ b/satpy/tests/reader_tests/test_seadas_l2.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for the 'seadas_l2' reader."""
+
+import numpy as np
+import pytest
+from pyresample.geometry import SwathDefinition
+from pytest_lazyfixture import lazy_fixture
+
+from satpy import Scene, available_readers
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_modis_chlor_a(tmp_path_factory):
+ """Create MODIS SEADAS file."""
+ filename = "a1.21322.1758.seadas.hdf"
+ full_path = str(tmp_path_factory.mktemp("seadas_l2") / filename)
+ return _create_seadas_chlor_a_hdf4_file(full_path, "Aqua", "MODISA")
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_viirs_npp_chlor_a(tmp_path_factory):
+ """Create VIIRS NPP SEADAS file."""
+ filename = "SEADAS_npp_d20211118_t1728125_e1739327.hdf"
+ full_path = str(tmp_path_factory.mktemp("seadas") / filename)
+ return _create_seadas_chlor_a_hdf4_file(full_path, "NPP", "VIIRSN")
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_viirs_j01_chlor_a(tmp_path_factory):
+ """Create VIIRS JPSS-01 SEADAS file."""
+ filename = "SEADAS_j01_d20211118_t1728125_e1739327.hdf"
+ full_path = str(tmp_path_factory.mktemp("seadas") / filename)
+ return _create_seadas_chlor_a_hdf4_file(full_path, "JPSS-1", "VIIRSJ1")
+
+
+def _create_seadas_chlor_a_hdf4_file(full_path, mission, sensor):
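+    """Create a fake SEADAS L2 chlor_a HDF4 file."""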
+ from pyhdf.SD import SD, SDC
+ h = SD(full_path, SDC.WRITE | SDC.CREATE)
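+    # Use setattr since these global attribute names contain spaces.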
+ setattr(h, "Sensor Name", sensor)
+ h.Mission = mission
+ setattr(h, "Start Time", "2021322175853191")
+ setattr(h, "End Time", "2021322180551214")
+
+ lon_info = {
+ "type": SDC.FLOAT32,
+ "data": np.zeros((5, 5), dtype=np.float32),
+ "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+ "attrs": {
+ "long_name": "Longitude\x00",
+ "standard_name": "longitude\x00",
+ "units": "degrees_east\x00",
+ "valid_range": (-180.0, 180.0),
+ }
+ }
+ lat_info = {
+ "type": SDC.FLOAT32,
+ "data": np.zeros((5, 5), np.float32),
+ "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+ "attrs": {
+ "long_name": "Latitude\x00",
+ "standard_name": "latitude\x00",
+ "units": "degrees_north\x00",
+ "valid_range": (-90.0, 90.0),
+ }
+ }
+ _add_variable_to_hdf4_file(h, "longitude", lon_info)
+ _add_variable_to_hdf4_file(h, "latitude", lat_info)
+
+ chlor_a_info = {
+ "type": SDC.FLOAT32,
+ "data": np.ones((5, 5), np.float32),
+ "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+ "attrs": {
+ "long_name": "Chlorophyll Concentration, OCI Algorithm\x00",
+ "units": "mg m^-3\x00",
+ "standard_name": "mass_concentration_of_chlorophyll_in_sea_water\x00",
+ "valid_range": (0.001, 100.0),
+ }
+ }
+ _add_variable_to_hdf4_file(h, "chlor_a", chlor_a_info)
+
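+    # Set every flag bit on one pixel so quality-flag masking produces exactly one NaN.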
+ l2_flags = np.zeros((5, 5), dtype=np.int32)
+ l2_flags[2, 2] = -1
+ l2_flags_info = {
+ "type": SDC.INT32,
+ "data": l2_flags,
+ "dim_labels": ["Number of Scan Lines", "Number of Pixel Control Points"],
+ "attrs": {},
+ }
+ _add_variable_to_hdf4_file(h, "l2_flags", l2_flags_info)
+ return [full_path]
+
+
+def _add_variable_to_hdf4_file(h, var_name, var_info):
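+    """Add a variable with attributes to an open HDF4 file."""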
+ v = h.create(var_name, var_info['type'], var_info['data'].shape)
+ v[:] = var_info['data']
+ for dim_count, dimension_name in enumerate(var_info['dim_labels']):
+ v.dim(dim_count).setname(dimension_name)
+ if var_info.get('fill_value'):
+ v.setfillvalue(var_info['fill_value'])
+ for attr_key, attr_val in var_info['attrs'].items():
+ setattr(v, attr_key, attr_val)
+
+
+@pytest.fixture(scope="module")
+def seadas_l2_modis_chlor_a_netcdf(tmp_path_factory):
+ """Create MODIS SEADAS NetCDF file."""
+ filename = "t1.21332.1758.seadas.nc"
+ full_path = str(tmp_path_factory.mktemp("seadas_l2") / filename)
+ return _create_seadas_chlor_a_netcdf_file(full_path, "Terra", "MODIS")
+
+
+def _create_seadas_chlor_a_netcdf_file(full_path, mission, sensor):
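+    """Create a fake SEADAS L2 chlor_a NetCDF file."""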
+ from netCDF4 import Dataset
+ nc = Dataset(full_path, "w")
+ nc.createDimension("number_of_lines", 5)
+ nc.createDimension("pixels_per_line", 5)
+ nc.instrument = sensor
+ nc.platform = mission
+ nc.time_coverage_start = "2021-11-18T17:58:53.191Z"
+ nc.time_coverage_end = "2021-11-18T18:05:51.214Z"
+
+ lon_info = {
+ "data": np.zeros((5, 5), dtype=np.float32),
+ "dim_labels": ("number_of_lines", "pixels_per_line"),
+ "attrs": {
+ "long_name": "Longitude",
+ "standard_name": "longitude",
+ "units": "degrees_east",
+ "valid_min": -180.0,
+ "valid_max": 180.0,
+ }
+ }
+ lat_info = {
+ "data": np.zeros((5, 5), np.float32),
+ "dim_labels": ("number_of_lines", "pixels_per_line"),
+ "attrs": {
+ "long_name": "Latitude",
+ "standard_name": "latitude",
+ "units": "degrees_north",
+ "valid_min": -90.0,
+ "valid_max": 90.0,
+ }
+ }
+ nav_group = nc.createGroup("navigation_data")
+ _add_variable_to_netcdf_file(nav_group, "longitude", lon_info)
+ _add_variable_to_netcdf_file(nav_group, "latitude", lat_info)
+
+ chlor_a_info = {
+ "data": np.ones((5, 5), np.float32),
+ "dim_labels": ("number_of_lines", "pixels_per_line"),
+ "attrs": {
+ "long_name": "Chlorophyll Concentration, OCI Algorithm",
+ "units": "mg m^-3",
+ "standard_name": "mass_concentration_of_chlorophyll_in_sea_water",
+ "valid_min": 0.001,
+ "valid_max": 100.0,
+ }
+ }
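+    # As in the HDF4 variant, set every flag bit on one pixel so masking yields a single NaN.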
+ l2_flags = np.zeros((5, 5), dtype=np.int32)
+ l2_flags[2, 2] = -1
+ l2_flags_info = {
+ "data": l2_flags,
+ "dim_labels": ("number_of_lines", "pixels_per_line"),
+ "attrs": {
+ "valid_min": -2147483648,
+ "valid_max": 2147483647,
+ },
+ }
+ geophys_group = nc.createGroup("geophysical_data")
+ _add_variable_to_netcdf_file(geophys_group, "chlor_a", chlor_a_info)
+ _add_variable_to_netcdf_file(geophys_group, "l2_flags", l2_flags_info)
+ return [full_path]
+
+
+def _add_variable_to_netcdf_file(nc, var_name, var_info):
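+    """Add a variable with attributes to an open NetCDF group."""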
+ v = nc.createVariable(var_name, var_info["data"].dtype.str[1:], dimensions=var_info["dim_labels"],
+ fill_value=var_info.get("fill_value"))
+ v[:] = var_info['data']
+ for attr_key, attr_val in var_info['attrs'].items():
+ setattr(v, attr_key, attr_val)
+
+
+class TestSEADAS:
+ """Test the SEADAS L2 file reader."""
+
+ def test_available_reader(self):
+ """Test that SEADAS L2 reader is available."""
+ assert 'seadas_l2' in available_readers()
+
+ @pytest.mark.parametrize(
+ "input_files",
+ [
+ lazy_fixture("seadas_l2_modis_chlor_a"),
+ lazy_fixture("seadas_l2_viirs_npp_chlor_a"),
+ lazy_fixture("seadas_l2_viirs_j01_chlor_a"),
+ ])
+ def test_scene_available_datasets(self, input_files):
+ """Test that datasets are available."""
+ scene = Scene(reader='seadas_l2', filenames=input_files)
+ available_datasets = scene.all_dataset_names()
+ assert len(available_datasets) > 0
+ assert 'chlor_a' in available_datasets
+
+ @pytest.mark.parametrize(
+ ("input_files", "exp_plat", "exp_sensor", "exp_rps"),
+ [
+ (lazy_fixture("seadas_l2_modis_chlor_a"), "Aqua", {"modis"}, 10),
+ (lazy_fixture("seadas_l2_viirs_npp_chlor_a"), "Suomi-NPP", {"viirs"}, 16),
+ (lazy_fixture("seadas_l2_viirs_j01_chlor_a"), "NOAA-20", {"viirs"}, 16),
+ (lazy_fixture("seadas_l2_modis_chlor_a_netcdf"), "Terra", {"modis"}, 10),
+ ])
+ @pytest.mark.parametrize("apply_quality_flags", [False, True])
+ def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags):
+ """Test that we can load 'chlor_a'."""
+ reader_kwargs = {"apply_quality_flags": apply_quality_flags}
+ scene = Scene(reader='seadas_l2', filenames=input_files, reader_kwargs=reader_kwargs)
+ scene.load(['chlor_a'])
+ data_arr = scene['chlor_a']
+ assert data_arr.dims == ("y", "x")
+ assert data_arr.attrs['platform_name'] == exp_plat
+ assert data_arr.attrs['sensor'] == exp_sensor
+ assert data_arr.attrs['units'] == 'mg m^-3'
+ assert data_arr.dtype.type == np.float32
+ assert isinstance(data_arr.attrs["area"], SwathDefinition)
+ assert data_arr.attrs["rows_per_scan"] == exp_rps
+ data = data_arr.data.compute()
+ if apply_quality_flags:
+ assert np.isnan(data[2, 2])
+ assert np.count_nonzero(np.isnan(data)) == 1
+ else:
+ assert np.count_nonzero(np.isnan(data)) == 0
diff --git a/satpy/tests/reader_tests/test_seviri_base.py b/satpy/tests/reader_tests/test_seviri_base.py
index 3cf482c6ba..92b4c5e287 100644
--- a/satpy/tests/reader_tests/test_seviri_base.py
+++ b/satpy/tests/reader_tests/test_seviri_base.py
@@ -17,20 +17,27 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Test the MSG common (native and hrit format) functionionalities."""
-from datetime import datetime
-import pytest
import unittest
+from datetime import datetime
+import dask.array as da
import numpy as np
+import pytest
import xarray as xr
-import dask.array as da
+from satpy import CHUNK_SIZE
from satpy.readers.seviri_base import (
- dec10216, chebyshev, get_cds_time, get_padding_area, pad_data_horizontally,
- pad_data_vertically, get_satpos, OrbitPolynomial, NoValidOrbitParams,
- OrbitPolynomialFinder
+ NoValidOrbitParams,
+ OrbitPolynomial,
+ OrbitPolynomialFinder,
+ chebyshev,
+ dec10216,
+ get_cds_time,
+ get_padding_area,
+ get_satpos,
+ pad_data_horizontally,
+ pad_data_vertically,
)
-from satpy import CHUNK_SIZE
def chebyshev4(c, x, domain):
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py
index 1f9a01f6d0..fdcedea3f2 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_calibration.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_calibration.py
@@ -17,17 +17,14 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittesting the native msg reader."""
-from datetime import datetime
import unittest
+from datetime import datetime
import numpy as np
import pytest
import xarray as xr
-from satpy.readers.seviri_base import (
- SEVIRICalibrationHandler, SEVIRICalibrationAlgorithm
-)
-
+from satpy.readers.seviri_base import SEVIRICalibrationAlgorithm, SEVIRICalibrationHandler
COUNTS_INPUT = xr.DataArray(
np.array([[377., 377., 377., 376., 375.],
@@ -97,11 +94,11 @@
)
VIS008_REFLECTANCE = xr.DataArray(
- np.array([[2.8066392, 2.6790648, 2.6790648, 2.6790648, 2.6790648],
- [2.6790648, 2.8066392, 2.8066392, 2.6790648, 2.8066392],
- [3.444512, 3.572086, 3.572086, 3.444512, 3.572086],
- [14.926213, 15.053792, 15.053792, 14.671064, 15.691662],
- [33.934814, 35.33813, 37.251755, 38.655075, 40.56869]],
+ np.array([[2.739768, 2.615233, 2.615233, 2.615233, 2.615233],
+ [2.615233, 2.739768, 2.739768, 2.615233, 2.739768],
+ [3.362442, 3.486977, 3.486977, 3.362442, 3.486977],
+ [14.570578, 14.695117, 14.695117, 14.321507, 15.317789],
+ [33.126278, 34.49616, 36.364185, 37.73407, 39.60209]],
dtype=np.float32)
)
@@ -250,13 +247,13 @@ class TestFileHandlerCalibrationBase:
},
'reflectance': {
'NOMINAL': xr.DataArray(
- [[np.nan, 40.47923],
- [445.27155, 1142.414]],
+ [[np.nan, 41.88985],
+ [460.7884, 1182.2247]],
dims=('y', 'x')
),
'EXTERNAL': xr.DataArray(
- [[np.nan, 404.7923],
- [4452.7153, 11424.14]],
+ [[np.nan, 418.89853],
+ [4607.8843, 11822.249]],
dims=('y', 'x')
)
}
@@ -331,13 +328,13 @@ class TestFileHandlerCalibrationBase:
},
'reflectance': {
'NOMINAL': xr.DataArray(
- [[np.nan, 401.28372],
- [4414.121, 11325.118]],
+ [[np.nan, 415.26767],
+ [4567.944, 11719.775]],
dims=('y', 'x')
),
'EXTERNAL': xr.DataArray(
- [[np.nan, 167.20154],
- [1839.217, 4718.799]],
+ [[np.nan, 173.02817],
+ [1903.31, 4883.2397]],
dims=('y', 'x')
)
}
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
index 9bf204ec1f..1a2258b6f5 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit.py
@@ -17,25 +17,20 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""The HRIT msg reader tests package."""
-import copy
import unittest
-from unittest import mock
from datetime import datetime
+from unittest import mock
import numpy as np
-from numpy import testing as npt
import pytest
import xarray as xr
+from numpy import testing as npt
-from satpy.readers.seviri_l1b_hrit import (
- HRITMSGFileHandler, HRITMSGPrologueFileHandler, HRITMSGEpilogueFileHandler,
-)
-from satpy.tests.utils import make_dataid, assert_attrs_equal
-from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS_INVALID
-from satpy.tests.reader_tests.test_seviri_l1b_calibration import (
- TestFileHandlerCalibrationBase
-)
import satpy.tests.reader_tests.test_seviri_l1b_hrit_setup as setup
+from satpy.readers.seviri_l1b_hrit import HRITMSGEpilogueFileHandler, HRITMSGFileHandler, HRITMSGPrologueFileHandler
+from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS_INVALID
+from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase
+from satpy.tests.utils import assert_attrs_equal, make_dataid
class TestHRITMSGBase(unittest.TestCase):
@@ -43,11 +38,6 @@ class TestHRITMSGBase(unittest.TestCase):
def assert_attrs_equal(self, attrs, attrs_exp):
"""Assert equality of dataset attributes."""
- attrs = copy.deepcopy(attrs)
- attrs_exp = copy.deepcopy(attrs_exp)
- # Raw metadata: Check existence only
- self.assertIn('raw_metadata', attrs)
- attrs.pop('raw_metadata')
assert_attrs_equal(attrs, attrs_exp, tolerance=1e-4)
@@ -165,6 +155,12 @@ def setUp(self):
projection_longitude=self.projection_longitude
)
+ def _get_fake_data(self):
+ return xr.DataArray(
+ data=np.zeros((self.nlines, self.ncols)),
+ dims=('y', 'x')
+ )
+
def test_get_area_def(self):
"""Test getting the area def."""
from pyresample.utils import proj4_radius_parameters
@@ -204,10 +200,7 @@ def test_read_band(self, memmap):
@mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate')
def test_get_dataset(self, calibrate, parent_get_dataset):
"""Test getting the dataset."""
- data = xr.DataArray(
- data=np.zeros((self.nlines, self.ncols)),
- dims=('y', 'x')
- )
+ data = self._get_fake_data()
parent_get_dataset.return_value = mock.MagicMock()
calibrate.return_value = data
@@ -227,6 +220,17 @@ def test_get_dataset(self, calibrate, parent_get_dataset):
setup.get_attrs_exp(self.projection_longitude)
)
+ @mock.patch('satpy.readers.seviri_l1b_hrit.HRITFileHandler.get_dataset')
+ @mock.patch('satpy.readers.seviri_l1b_hrit.HRITMSGFileHandler.calibrate')
+ def test_get_dataset_with_raw_metadata(self, calibrate, parent_get_dataset):
+ """Test getting the dataset."""
+ calibrate.return_value = self._get_fake_data()
+ key = make_dataid(name='VIS006', calibration='reflectance')
+ info = setup.get_fake_dataset_info()
+ self.reader.include_raw_metadata = True
+ res = self.reader.get_dataset(key, info)
+ assert 'raw_metadata' in res.attrs
+
def test_get_raw_mda(self):
"""Test provision of raw metadata."""
self.reader.mda = {'segment': 1, 'loff': 123}
@@ -275,7 +279,7 @@ def init_patched(self, *args, **kwargs):
init.side_effect = init_patched
- HRITMSGPrologueFileHandler(filename=None,
+ HRITMSGPrologueFileHandler(filename='dummy_prologue_filename',
filename_info={'service': ''},
filetype_info=None,
ext_calib_coefs={},
@@ -308,7 +312,7 @@ def init_patched(self, *args, **kwargs):
init.side_effect = init_patched
- self.reader = HRITMSGEpilogueFileHandler(filename=None,
+ self.reader = HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename',
filename_info={'service': ''},
filetype_info=None,
calib_mode='nominal')
@@ -323,7 +327,7 @@ def init_patched(self, *args, **kwargs):
init.side_effect = init_patched
- HRITMSGEpilogueFileHandler(filename=None,
+ HRITMSGEpilogueFileHandler(filename='dummy_epilogue_filename',
filename_info={'service': ''},
filetype_info=None,
ext_calib_coefs={},
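A note on the pattern used throughout these handler tests: the class `__init__` is replaced by a mock whose `side_effect` is a stub that only sets the minimal state the handler needs, so no real file I/O happens while the (now non-None) `filename` is still threaded through. A minimal, self-contained sketch of the idea — class and helper names here are illustrative, not satpy's:

import unittest.mock as mock

class FileBackedHandler:
    def __init__(self, filename):
        # The real handler would open and parse `filename` here.
        with open(filename, 'rb') as fobj:
            self.header = fobj.read()

def make_stub_handler(filename):
    def init_patched(self, *args, **kwargs):
        self.header = {}  # minimal state that later code expects

    with mock.patch.object(FileBackedHandler, '__init__') as init:
        init.side_effect = init_patched
        handler = FileBackedHandler(filename)
    handler.filename = filename
    return handler

handler = make_stub_handler('dummy_prologue_filename')
assert handler.header == {}  # stub ran instead of the real __init__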
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
index 4853b466e8..b89a32bd4f 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_hrit_setup.py
@@ -22,9 +22,7 @@
import numpy as np
-from satpy.readers.seviri_l1b_hrit import (
- HRITMSGFileHandler, HRITMSGPrologueFileHandler
-)
+from satpy.readers.seviri_l1b_hrit import HRITMSGFileHandler, HRITMSGPrologueFileHandler
from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS
@@ -59,6 +57,7 @@ def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0,
m = mock.mock_open()
with mock.patch('satpy.readers.seviri_l1b_hrit.np.fromfile') as fromfile, \
mock.patch('satpy.readers.hrit_base.open', m, create=True) as newopen, \
+ mock.patch('satpy.readers.utils.open', m, create=True) as utilopen, \
mock.patch('satpy.readers.seviri_l1b_hrit.CHANNEL_NAMES'), \
mock.patch.object(HRITMSGFileHandler, '_get_hd', new=new_get_hd), \
mock.patch.object(HRITMSGPrologueFileHandler, 'read_prologue',
@@ -70,8 +69,12 @@ def get_fake_file_handler(start_time, nlines, ncols, projection_longitude=0,
('hdr_id', int)]
)
newopen.return_value.__enter__.return_value.tell.return_value = 1
+ # The size of the return value hereafter was chosen arbitrarily with the expectation
+ # that it would return sufficiently many bytes for testing the fake-opening of HRIT
+ # files.
+ utilopen.return_value.__enter__.return_value.read.return_value = bytes([0]*8192)
prologue = HRITMSGPrologueFileHandler(
- filename=None,
+ filename='dummy_prologue_filename',
filename_info=filename_info,
filetype_info={}
)
@@ -118,7 +121,8 @@ def get_fake_prologue(projection_longitude, orbit_polynomials):
},
'ImageAcquisition': {
'PlannedAcquisitionTime': {
- 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888)
+ 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 9, 304888),
+ 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0)
}
}
}
@@ -209,9 +213,6 @@ def get_attrs_exp(projection_longitude=0.0):
'standard_name': 'standard_name',
'platform_name': 'Meteosat-11',
'sensor': 'seviri',
- 'satellite_longitude': projection_longitude,
- 'satellite_latitude': 0.0,
- 'satellite_altitude': 35785831.0,
'orbital_parameters': {'projection_longitude': projection_longitude,
'projection_latitude': 0.,
'projection_altitude': 35785831.0,
@@ -220,5 +221,7 @@ def get_attrs_exp(projection_longitude=0.0):
'satellite_actual_longitude': -3.55117540817073,
'satellite_actual_latitude': -0.5711243456528018,
'satellite_actual_altitude': 35783296.150123544},
- 'georef_offset_corrected': True
+ 'georef_offset_corrected': True,
+ 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888),
+ 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0, 0)
}
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_icare.py b/satpy/tests/reader_tests/test_seviri_l1b_icare.py
index 839954f01f..5ca8ac1a2e 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_icare.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_icare.py
@@ -19,10 +19,12 @@
import os
import unittest
from unittest import mock
+
+import dask.array as da
import numpy as np
-from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
-from satpy.readers import load_reader
+from satpy.readers import load_reader
+from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
@@ -63,7 +65,7 @@ def get_test_content(self, filename, filename_info, filename_type):
file_content['Brightness_Temperature/attr/add_offset'] = 0.
file_content['Brightness_Temperature/shape'] = DEFAULT_FILE_SHAPE
- # convert tp xarrays
+ # convert to xarrays
from xarray import DataArray
for key, val in file_content.items():
if isinstance(val, np.ndarray):
@@ -71,10 +73,7 @@ def get_test_content(self, filename, filename_info, filename_type):
for a in ['_FillValue', 'scale_factor', 'add_offset']:
if key + '/attr/' + a in file_content:
attrs[a] = file_content[key + '/attr/' + a]
- file_content[key] = DataArray(val, dims=('fakeDim0', 'fakeDim1'), attrs=attrs)
- if 'y' not in file_content['Normalized_Radiance'].dims:
- file_content['Normalized_Radiance'] = file_content['Normalized_Radiance'].rename({'fakeDim0': 'x',
- 'fakeDim1': 'y'})
+ file_content[key] = DataArray(da.from_array(val), dims=('x', 'y'), attrs=attrs)
return file_content
@@ -183,7 +182,6 @@ def test_sensor_names(self):
'Meteosat-11': 'MSG4/SEVIRI'}
with mock.patch('satpy.tests.reader_tests.test_seviri_l1b_icare.'
'FakeHDF4FileHandler2.get_test_content') as patched_func:
-
def _run_target():
patched_func.return_value = file_data
return self.p.target(mock.MagicMock(),
@@ -205,3 +203,16 @@ def test_bad_bandname(self):
self.p.target(mock.MagicMock(),
mock.MagicMock(),
mock.MagicMock())._get_dsname({'name': 'badband'})
+
+ def test_nocompute(self):
+ """Test that dask does not compute anything in the reader itself."""
+ import dask
+
+ from satpy.tests.utils import CustomScheduler
+ with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ 'GEO_L1B-MSG1_2004-12-29T12-15-00_G_VIS08_V1-04.hdf'
+ ])
+ r.create_filehandlers(loadables)
+ r.load(['VIS008'])
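`test_nocompute` relies on `satpy.tests.utils.CustomScheduler`, a dask scheduler wrapper that raises as soon as more than `max_computes` graph evaluations happen — which is how the test proves the reader stays fully lazy. A rough re-implementation of that idea (a sketch, not the actual satpy class):

import dask
import dask.array as da

class CountingScheduler:
    """Raise if the wrapped dask graph is computed more often than allowed."""

    def __init__(self, max_computes=0):
        self.max_computes = max_computes
        self.total_computes = 0

    def __call__(self, dsk, keys, **kwargs):
        self.total_computes += 1
        if self.total_computes > self.max_computes:
            raise RuntimeError("Too many computes: %d > %d" %
                               (self.total_computes, self.max_computes))
        return dask.get(dsk, keys, **kwargs)

with dask.config.set(scheduler=CountingScheduler(max_computes=0)):
    lazy = da.zeros((4, 4), chunks=2) + 1  # building the graph is fine
    # lazy.compute() here would raise RuntimeError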
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py
index a8dfd3aac9..6bf5d2705d 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_native.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py
@@ -17,9 +17,11 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittesting the Native SEVIRI reader."""
-from datetime import datetime
+from __future__ import annotations
+
import os
import unittest
+from datetime import datetime
from unittest import mock
import dask.array as da
@@ -28,17 +30,10 @@
import xarray as xr
from satpy.readers.eum_base import time_cds_short
-from satpy.readers.seviri_l1b_native import (
- NativeMSGFileHandler, ImageBoundaries, Padder,
- get_available_channels,
-)
-from satpy.tests.reader_tests.test_seviri_l1b_calibration import (
- TestFileHandlerCalibrationBase
-)
-from satpy.tests.reader_tests.test_seviri_base import (
- ORBIT_POLYNOMIALS, ORBIT_POLYNOMIALS_INVALID
-)
-from satpy.tests.utils import make_dataid, assert_attrs_equal
+from satpy.readers.seviri_l1b_native import ImageBoundaries, NativeMSGFileHandler, Padder, get_available_channels
+from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS, ORBIT_POLYNOMIALS_INVALID
+from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase
+from satpy.tests.utils import assert_attrs_equal, make_dataid
CHANNEL_INDEX_LIST = ['VIS006', 'VIS008', 'IR_016', 'IR_039',
'WV_062', 'WV_073', 'IR_087', 'IR_097',
@@ -50,13 +45,13 @@
SEC15HDR = '15_SECONDARY_PRODUCT_HEADER'
IDS = 'SelectedBandIDs'
-TEST1_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}}
+TEST1_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}}
TEST1_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX--XX--XX--'
-TEST2_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}}
+TEST2_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}}
TEST2_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XX-XXXX----X'
-TEST3_HEADER_CHNLIST = {SEC15HDR: {IDS: {}}}
+TEST3_HEADER_CHNLIST: dict[str, dict[str, dict]] = {SEC15HDR: {IDS: {}}}
TEST3_HEADER_CHNLIST[SEC15HDR][IDS]['Value'] = 'XXXXXXXXXXXX'
TEST_AREA_EXTENT_EARTHMODEL1_VISIR_FULLDISK = {
@@ -533,7 +528,7 @@ class TestNativeMSGArea(unittest.TestCase):
"""
@staticmethod
- def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan):
+ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan, good_qual='OK'):
"""Create mocked NativeMSGFileHandler.
Contains sufficient attributes for NativeMSGFileHandler.get_area_extent to be able to execute.
@@ -577,8 +572,11 @@ def create_test_header(earth_model, dataset_id, is_full_disk, is_rapid_scan):
n_hrv_cols = n_visir_cols * 3
n_hrv_lines = n_visir_lines * 3
ssp_lon = 0
-
header = {
+ '15_MAIN_PRODUCT_HEADER': {
+ 'QQOV': {'Name': 'QQOV',
+ 'Value': good_qual}
+ },
'15_DATA_HEADER': {
'ImageDescription': {
reference_grid: {
@@ -1024,11 +1022,21 @@ def file_handler(self):
}
}
}
+ trailer = {
+ '15TRAILER': {
+ 'ImageProductionStats': {
+ 'ActualScanningSummary': {
+ 'ForwardScanStart': self.scan_time
+ }
+ }
+ }
+ }
header['15_DATA_HEADER'].update(TEST_HEADER_CALIB)
with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__',
return_value=None):
fh = NativeMSGFileHandler()
fh.header = header
+ fh.trailer = trailer
fh.platform_id = self.platform_id
return fh
@@ -1090,26 +1098,16 @@ class TestNativeMSGDataset:
@pytest.fixture
def file_handler(self):
"""Create a file handler for testing."""
- header = {
- '15_DATA_HEADER': {
- 'SatelliteStatus': {
- 'SatelliteDefinition': {
- 'NominalLongitude': 0.0
- },
- 'Orbit': {
- 'OrbitPolynomial': ORBIT_POLYNOMIALS
- }
- },
- 'ImageAcquisition': {
- 'PlannedAcquisitionTime': {
- 'TrueRepeatCycleStart': datetime(
- 2006, 1, 1, 12, 15, 9, 304888
- )
+ trailer = {
+ '15TRAILER': {
+ 'ImageProductionStats': {
+ 'ActualScanningSummary': {
+ 'ForwardScanStart': datetime(2006, 1, 1, 12, 15, 9, 304888),
+ 'ForwardScanEnd': datetime(2006, 1, 1, 12, 27, 9, 304888)
}
}
- },
+ }
}
- header['15_DATA_HEADER'].update(TEST_HEADER_CALIB)
mda = {
'channel_list': ['VIS006', 'IR_108'],
'number_of_lines': 4,
@@ -1124,6 +1122,48 @@ def file_handler(self):
'b': 6356583.8
}
}
+ header = self._fake_header()
+ data = self._fake_data()
+ with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__',
+ return_value=None):
+ fh = NativeMSGFileHandler()
+ fh.header = header
+ fh.trailer = trailer
+ fh.mda = mda
+ fh.dask_array = da.from_array(data)
+ fh.platform_id = 324
+ fh.fill_disk = False
+ fh.calib_mode = 'NOMINAL'
+ fh.ext_calib_coefs = {}
+ fh.include_raw_metadata = False
+ fh.mda_max_array_size = 100
+ return fh
+
+ @staticmethod
+ def _fake_header():
+ header = {
+ '15_DATA_HEADER': {
+ 'SatelliteStatus': {
+ 'SatelliteDefinition': {
+ 'NominalLongitude': 0.0
+ },
+ 'Orbit': {
+ 'OrbitPolynomial': ORBIT_POLYNOMIALS
+ }
+ },
+ 'ImageAcquisition': {
+ 'PlannedAcquisitionTime': {
+ 'TrueRepeatCycleStart': datetime(2006, 1, 1, 12, 15, 0, 0),
+ 'PlannedRepeatCycleEnd': datetime(2006, 1, 1, 12, 30, 0, 0),
+ }
+ }
+ },
+ }
+ header['15_DATA_HEADER'].update(TEST_HEADER_CALIB)
+ return header
+
+ @staticmethod
+ def _fake_data():
num_visir_cols = 5 # will be divided by 1.25 -> 4 columns
visir_rec = [
('line_data', np.uint8, (num_visir_cols,)),
@@ -1147,18 +1187,7 @@ def file_handler(self):
[(vis006_line4,), (ir108_line4,)]],
dtype=[('visir', visir_rec)]
)
- with mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler.__init__',
- return_value=None):
- fh = NativeMSGFileHandler()
- fh.header = header
- fh.mda = mda
- fh.dask_array = da.from_array(data)
- fh.platform_id = 324
- fh.fill_disk = False
- fh.calib_mode = 'NOMINAL'
- fh.ext_calib_coefs = {}
- fh.mda_max_array_size = 100
- return fh
+ return data
def test_get_dataset(self, file_handler):
"""Test getting the dataset."""
@@ -1173,6 +1202,15 @@ def test_get_dataset(self, file_handler):
'standard_name': 'counts'
}
dataset = file_handler.get_dataset(dataset_id, dataset_info)
+ expected = self._exp_data_array()
+ xr.testing.assert_equal(dataset, expected)
+ assert 'raw_metadata' not in dataset.attrs
+ assert file_handler.start_time == datetime(2006, 1, 1, 12, 15, 0)
+ assert file_handler.end_time == datetime(2006, 1, 1, 12, 30, 0)
+ assert_attrs_equal(dataset.attrs, expected.attrs, tolerance=1e-4)
+
+ @staticmethod
+ def _exp_data_array():
expected = xr.DataArray(
np.array([[4., 32., 193., 5.],
[24., 112., 514., 266.],
@@ -1191,22 +1229,41 @@ def test_get_dataset(self, file_handler):
'projection_latitude': 0.0,
'projection_altitude': 35785831.0
},
+ 'time_parameters': {
+ 'nominal_start_time': datetime(2006, 1, 1, 12, 15, 0),
+ 'nominal_end_time': datetime(2006, 1, 1, 12, 30, 0),
+ 'observation_start_time': datetime(2006, 1, 1, 12, 15, 9, 304888),
+ 'observation_end_time': datetime(2006, 1, 1, 12, 27, 9, 304888),
+ },
'georef_offset_corrected': True,
'platform_name': 'MSG-3',
'sensor': 'seviri',
'units': '1',
'wavelength': (1, 2, 3),
- 'standard_name': 'counts'
+ 'standard_name': 'counts',
}
)
expected['acq_time'] = ('y', [np.datetime64('1958-01-02 00:00:01'),
np.datetime64('1958-01-02 00:00:02'),
np.datetime64('1958-01-02 00:00:03'),
np.datetime64('1958-01-02 00:00:04')])
- xr.testing.assert_equal(dataset, expected)
- assert 'raw_metadata' in dataset.attrs
- dataset.attrs.pop('raw_metadata')
- assert_attrs_equal(dataset.attrs, expected.attrs, tolerance=1e-4)
+ return expected
+
+ def test_get_dataset_with_raw_metadata(self, file_handler):
+ """Test provision of raw metadata."""
+ file_handler.include_raw_metadata = True
+ dataset_id = make_dataid(
+ name='VIS006',
+ resolution=3000,
+ calibration='counts'
+ )
+ dataset_info = {
+ 'units': '1',
+ 'wavelength': (1, 2, 3),
+ 'standard_name': 'counts'
+ }
+ res = file_handler.get_dataset(dataset_id, dataset_info)
+ assert 'raw_metadata' in res.attrs
def test_satpos_no_valid_orbit_polynomial(self, file_handler):
"""Test satellite position if there is no valid orbit polynomial."""
@@ -1314,3 +1371,39 @@ def test_header_type(file_content, exp_header_size):
fh = NativeMSGFileHandler('myfile', {}, None)
assert fh.header_type.itemsize == exp_header_size
assert '15_SECONDARY_PRODUCT_HEADER' in fh.header
+
+
+def test_header_warning():
+ """Test warning is raised for NOK quality flag."""
+ header_good = TestNativeMSGArea.create_test_header(
+ dataset_id=make_dataid(name='VIS006', resolution=3000),
+ earth_model=1,
+ is_full_disk=True,
+ is_rapid_scan=0,
+ good_qual='OK'
+ )
+ header_bad = TestNativeMSGArea.create_test_header(
+ dataset_id=make_dataid(name='VIS006', resolution=3000),
+ earth_model=1,
+ is_full_disk=True,
+ is_rapid_scan=0,
+ good_qual='NOK'
+ )
+
+ with mock.patch('satpy.readers.seviri_l1b_native.np.fromfile') as fromfile, \
+ mock.patch('satpy.readers.seviri_l1b_native.recarray2dict') as recarray2dict, \
+ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._get_memmap') as _get_memmap, \
+ mock.patch('satpy.readers.seviri_l1b_native.NativeMSGFileHandler._read_trailer'), \
+ mock.patch("builtins.open", mock.mock_open(read_data=b'FormatName : NATIVE')):
+ recarray2dict.side_effect = (lambda x: x)
+ _get_memmap.return_value = np.arange(3)
+
+ exp_warning = "The quality flag for this file indicates not OK. Use this data with caution!"
+
+ fromfile.return_value = header_good
+ with pytest.warns(None):
+ NativeMSGFileHandler('myfile', {}, None)
+
+ fromfile.return_value = header_bad
+ with pytest.warns(UserWarning, match=exp_warning):
+ NativeMSGFileHandler('myfile', {}, None)
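The new `test_header_warning` pins down the reader behaviour that a `UserWarning` is emitted whenever the `QQOV` field of the `15_MAIN_PRODUCT_HEADER` is anything other than 'OK'. The check presumably looks roughly like this sketch (the real `NativeMSGFileHandler` code may differ in detail):

import warnings

def warn_if_bad_quality(header):
    """Emit a UserWarning when the overall quality flag is not 'OK'."""
    qqov = header['15_MAIN_PRODUCT_HEADER']['QQOV']['Value']
    if qqov != 'OK':
        warnings.warn("The quality flag for this file indicates not OK. "
                      "Use this data with caution!", UserWarning)

warn_if_bad_quality(
    {'15_MAIN_PRODUCT_HEADER': {'QQOV': {'Name': 'QQOV', 'Value': 'NOK'}}})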
diff --git a/satpy/tests/reader_tests/test_seviri_l1b_nc.py b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
index 4f29b46317..615ca49e64 100644
--- a/satpy/tests/reader_tests/test_seviri_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_seviri_l1b_nc.py
@@ -25,11 +25,9 @@
import xarray as xr
from satpy.readers.seviri_l1b_nc import NCSEVIRIFileHandler
-from satpy.tests.reader_tests.test_seviri_l1b_calibration import (
- TestFileHandlerCalibrationBase
-)
from satpy.tests.reader_tests.test_seviri_base import ORBIT_POLYNOMIALS
-from satpy.tests.utils import make_dataid, assert_attrs_equal
+from satpy.tests.reader_tests.test_seviri_l1b_calibration import TestFileHandlerCalibrationBase
+from satpy.tests.utils import assert_attrs_equal, make_dataid
def to_cds_time(time):
@@ -46,8 +44,17 @@ def to_cds_time(time):
class TestNCSEVIRIFileHandler(TestFileHandlerCalibrationBase):
"""Unit tests for SEVIRI netCDF reader."""
- def _get_fake_dataset(self, counts):
- """Create a fake dataset."""
+ def _get_fake_dataset(self, counts, h5netcdf):
+ """Create a fake dataset.
+
+ Args:
+ counts (xr.DataArray):
+ Array with data.
+ h5netcdf (boolean):
+ If True, scalar attributes are wrapped in 1-element arrays,
+ as the h5netcdf backend in xarray typically returns them.
+
+ """
acq_time_day = np.repeat([1, 1], 11).reshape(2, 11)
acq_time_msec = np.repeat([1000, 2000], 11).reshape(2, 11)
orbit_poly_start_day, orbit_poly_start_msec = to_cds_time(
@@ -128,11 +135,24 @@ def _get_fake_dataset(self, counts):
'east_most_pixel': 1,
'west_most_pixel': 3712,
'south_most_line': 1,
+ 'vis_ir_grid_origin': 0,
'vis_ir_column_dir_grid_step': 3.0004032,
'vis_ir_line_dir_grid_step': 3.0004032,
'type_of_earth_model': '0x02',
}
)
+
+ if h5netcdf:
+ nattrs = {'equatorial_radius': np.array([6378.169]),
+ 'north_polar_radius': np.array([6356.5838]),
+ 'south_polar_radius': np.array([6356.5838]),
+ 'longitude_of_SSP': np.array([0.0]),
+ 'vis_ir_column_dir_grid_step': np.array([3.0004032]),
+ 'vis_ir_line_dir_grid_step': np.array([3.0004032])
+ }
+
+ ds.attrs.update(nattrs)
+
ds['VIS006'].attrs.update({
'scale_factor': self.gains_nominal[0],
'add_offset': self.offsets_nominal[0]
@@ -154,12 +174,17 @@ def _get_fake_dataset(self, counts):
return ds
+ @pytest.fixture
+ def h5netcdf(self):
+ """Fixture for xr backend choice."""
+ return False
+
@pytest.fixture(name='file_handler')
- def file_handler(self, counts):
+ def file_handler(self, counts, h5netcdf):
"""Create a mocked file handler."""
with mock.patch(
- 'satpy.readers.seviri_l1b_nc.xr.open_dataset',
- return_value=self._get_fake_dataset(counts)
+ 'satpy.readers.seviri_l1b_nc.open_dataset',
+ return_value=self._get_fake_dataset(counts=counts, h5netcdf=h5netcdf)
):
return NCSEVIRIFileHandler(
'filename',
@@ -200,10 +225,6 @@ def test_calibrate(
self, file_handler, channel, calibration, use_ext_coefs
):
"""Test the calibration."""
- file_handler.nc = file_handler.nc.rename({
- 'num_rows_vis_ir': 'y',
- 'num_columns_vis_ir': 'x'
- })
external_coefs = self.external_coefs if use_ext_coefs else {}
expected = self._get_expected(
channel=channel,
@@ -285,3 +306,9 @@ def test_satpos_no_valid_orbit_polynomial(self, file_handler):
res = file_handler.get_dataset(dataset_id, dataset_info)
assert 'satellite_actual_longitude' not in res.attrs[
'orbital_parameters']
+
+ @pytest.mark.parametrize('h5netcdf', [True])
+ def test_h5netcdf_peculiarity(self, file_handler, h5netcdf):
+ """Test conversion of attributes when xarray is used with h5netcdf backend."""
+ fh = file_handler
+ assert isinstance(fh.mda['projection_parameters']['a'], float)
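The `h5netcdf` fixture mimics the xarray h5netcdf backend, which can return scalar netCDF attributes as 1-element numpy arrays; `test_h5netcdf_peculiarity` then asserts that the handler has unwrapped them into plain floats. A tiny illustrative helper showing the kind of normalisation involved (not satpy's actual code):

import numpy as np

def unwrap_scalar(value):
    """Return a plain Python scalar for 1-element arrays, else pass through."""
    if isinstance(value, np.ndarray) and value.size == 1:
        return value.item()
    return value

assert isinstance(unwrap_scalar(np.array([6378.169])), float)
assert unwrap_scalar(3.0004032) == 3.0004032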
diff --git a/satpy/tests/reader_tests/test_seviri_l2_bufr.py b/satpy/tests/reader_tests/test_seviri_l2_bufr.py
index f7906c5912..0e6598ca99 100644
--- a/satpy/tests/reader_tests/test_seviri_l2_bufr.py
+++ b/satpy/tests/reader_tests/test_seviri_l2_bufr.py
@@ -19,16 +19,22 @@
import sys
import unittest
+from datetime import datetime
from unittest import mock
+
+import dask.array as da
import numpy as np
-from datetime import datetime
+import pytest
+from pyresample import geometry
-FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_csr'}
+from satpy.tests.utils import make_dataid
+
+FILETYPE_INFO = {'file_type': 'seviri_l2_bufr_asr'}
FILENAME_INFO = {'start_time': '20191112000000',
- 'spacecraft': 'MSG4'}
+ 'spacecraft': 'MSG1'}
FILENAME_INFO2 = {'start_time': '20191112000000',
- 'spacecraft': 'MSG4',
+ 'spacecraft': 'MSG1',
'server': 'TESTSERVER'}
MPEF_PRODUCT_HEADER = {
'NominalTime': datetime(2019, 11, 6, 18, 0),
@@ -37,78 +43,239 @@
}
DATASET_INFO = {
+ 'name': 'testdata',
'key': '#1#brightnessTemperature',
+ 'coordinates': ('longitude', 'latitude'),
'fill_value': 0
}
+DATASET_INFO_LAT = {
+ 'name': 'latitude',
+ 'key': 'latitude',
+ 'fill_value': -1.e+100
+}
+
+DATASET_INFO_LON = {
+ 'name': 'longitude',
+ 'key': 'longitude',
+ 'fill_value': -1.e+100
+}
+
+
DATASET_ATTRS = {
'platform_name': 'MET08',
'ssp_lon': 41.5,
'seg_size': 16
}
+AREA_DEF = geometry.AreaDefinition(
+ 'msg_seviri_iodc_48km',
+ 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 48 km resolution',
+ "",
+ {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'],
+ 'h': 35785831., 'proj': 'geos', 'units': 'm'},
+ 232,
+ 232,
+ (-5570248.6866, -5567248.2834, 5567248.2834, 5570248.6866)
+)
+
+AREA_DEF_FES = geometry.AreaDefinition(
+ 'msg_seviri_res_48km',
+ 'MSG SEVIRI Full Earth Scanning service area definition with 48 km resolution',
+ "",
+ {'a': 6378169., 'b': 6356583.8, 'lon_0': 0.0,
+ 'h': 35785831., 'proj': 'geos', 'units': 'm'},
+ 232,
+ 232,
+ (-5570248.6866, -5567248.2834, 5567248.2834, 5570248.6866)
+)
+
+AREA_DEF_EXT = geometry.AreaDefinition(
+ 'msg_seviri_iodc_9km_ext',
+ 'MSG SEVIRI Indian Ocean Data Coverage service area definition with 9 km resolution '
+ '(extended outside original 3km grid)',
+ "",
+ {'a': 6378169., 'b': 6356583.8, 'lon_0': DATASET_ATTRS['ssp_lon'],
+ 'h': 35785831., 'proj': 'geos', 'units': 'm'},
+ 1238,
+ 1238,
+ (-5571748.888268564, -5571748.888155806, 5571748.888155806, 5571748.888268564)
+)
-class TestSeviriL2Bufr(unittest.TestCase):
- """Test NativeMSGBufrHandler."""
+TEST_FILES = [
+ 'ASRBUFRProd_20191106130000Z_00_OMPEFS01_MET08_FES_E0000',
+ 'MSG1-SEVI-MSGASRE-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr',
+ 'MSG1-SEVI-MSGASRE-0101-0101-20191106101500.000000000Z-20191106103218-1362148'
+]
+
+# Test data
+DATA = np.random.uniform(low=250, high=350, size=(128,))
+LAT = np.random.uniform(low=-80, high=80, size=(128,))
+LON = np.random.uniform(low=-38.5, high=121.5, size=(128,))
+
+
+class SeviriL2BufrData:
+ """Mock SEVIRI L2 BUFR data."""
@unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows")
- def seviri_l2_bufr_test(self, filename):
- """Test the SEVIRI BUFR handler."""
- from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler
+ def __init__(self, filename, with_adef=False, rect_lon='default'):
+ """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader."""
import eccodes as ec
- buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
- ec.codes_set(buf1, 'unpack', 1)
- samp1 = np.random.uniform(low=250, high=350, size=(128,))
- # write the bufr test data twice as we want to read in and the concatenate the data in the reader
- # 55 id corresponds to METEOSAT 8
- ec.codes_set(buf1, 'satelliteIdentifier', 55)
- ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1)
- ec.codes_set_array(buf1, '#1#brightnessTemperature', samp1)
-
- m = mock.mock_open()
+
+ from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler
+ self.buf1 = ec.codes_bufr_new_from_samples('BUFR4_local_satellite')
+ ec.codes_set(self.buf1, 'unpack', 1)
+ # write the bufr test data twice as we want to read in and then concatenate the data in the reader
+ # id 55 corresponds to METEOSAT 8
+ ec.codes_set(self.buf1, 'satelliteIdentifier', 55)
+ ec.codes_set_array(self.buf1, 'latitude', LAT)
+ ec.codes_set_array(self.buf1, 'latitude', LAT)
+ ec.codes_set_array(self.buf1, 'longitude', LON)
+ ec.codes_set_array(self.buf1, 'longitude', LON)
+ ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)
+ ec.codes_set_array(self.buf1, '#1#brightnessTemperature', DATA)
+
+ self.m = mock.mock_open()
# only our offline product contain MPEF product headers so we get the metadata from there
if ('BUFRProd' in filename):
with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile') as fromfile:
fromfile.return_value = MPEF_PRODUCT_HEADER
with mock.patch('satpy.readers.seviri_l2_bufr.recarray2dict') as recarray2dict:
recarray2dict.side_effect = (lambda x: x)
- fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO)
- fh.mpef_header = MPEF_PRODUCT_HEADER
+ self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2, FILETYPE_INFO,
+ with_area_definition=with_adef, rectification_longitude=rect_lon)
+ self.fh.mpef_header = MPEF_PRODUCT_HEADER
else:
# No Mpef Header so we get the metadata from the BUFR messages
- with mock.patch('satpy.readers.seviri_l2_bufr.open', m, create=True):
+ with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True):
with mock.patch('eccodes.codes_bufr_new_from_file',
- side_effect=[buf1, None, buf1, None, buf1, None]) as ec1:
+ side_effect=[self.buf1, None, self.buf1, None, self.buf1, None]) as ec1:
ec1.return_value = ec1.side_effect
with mock.patch('eccodes.codes_set') as ec2:
ec2.return_value = 1
with mock.patch('eccodes.codes_release') as ec5:
ec5.return_value = 1
- fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO)
+ self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO, FILETYPE_INFO,
+ with_area_definition=with_adef,
+ rectification_longitude=rect_lon)
- with mock.patch('satpy.readers.seviri_l2_bufr.open', m, create=True):
+ def get_data(self, dataset_info):
+ """Read data from mock file."""
+ with mock.patch('satpy.readers.seviri_l2_bufr.open', self.m, create=True):
with mock.patch('eccodes.codes_bufr_new_from_file',
- side_effect=[buf1, buf1, None]) as ec1:
+ side_effect=[self.buf1, self.buf1, None]) as ec1:
ec1.return_value = ec1.side_effect
with mock.patch('eccodes.codes_set') as ec2:
ec2.return_value = 1
with mock.patch('eccodes.codes_release') as ec5:
ec5.return_value = 1
- z = fh.get_dataset(None, DATASET_INFO)
- # concatenate the original test arrays as
- # get dataset will have read and concatented the data
- x1 = np.concatenate((samp1, samp1), axis=0)
- np.testing.assert_array_equal(z.values, x1)
- self.assertEqual(z.attrs['platform_name'],
- DATASET_ATTRS['platform_name'])
- self.assertEqual(z.attrs['ssp_lon'],
- DATASET_ATTRS['ssp_lon'])
- self.assertEqual(z.attrs['seg_size'],
- DATASET_ATTRS['seg_size'])
-
- def test_seviri_l2_bufr(self):
- """Call the test function."""
- self.seviri_l2_bufr_test('GIIBUFRProduct_20191106130000Z_00_OMPEFS04_MET11_FES_E0000')
- self.seviri_l2_bufr_test('MSG4-SEVI-MSGGIIN-0101-0101-20191106130000.000000000Z-20191106131702-1362128.bfr')
- self.seviri_l2_bufr_test('MSG4-SEVI-MSGGIIN-0101-0101-20191106101500.000000000Z-20191106103218-1362148')
+ z = self.fh.get_dataset(make_dataid(name=dataset_info['name'], resolution=48000), dataset_info)
+
+ return z
+
+
+@pytest.mark.parametrize("input_file", TEST_FILES)
+class TestSeviriL2BufrReader:
+ """Test SEVIRI L2 BUFR Reader."""
+
+ @staticmethod
+ def test_lonslats(input_file):
+ """Test reading of longitude and latitude data with SEVIRI L2 BUFR reader."""
+ bufr_obj = SeviriL2BufrData(input_file)
+ zlat = bufr_obj.get_data(DATASET_INFO_LAT)
+ zlon = bufr_obj.get_data(DATASET_INFO_LON)
+ np.testing.assert_array_equal(zlat.values, np.concatenate((LAT, LAT), axis=0))
+ np.testing.assert_array_equal(zlon.values, np.concatenate((LON, LON), axis=0))
+
+ @staticmethod
+ def test_attributes_with_swath_definition(input_file):
+ """Test correctness of dataset attributes with data loaded with a SwathDefinition (default behaviour)."""
+ bufr_obj = SeviriL2BufrData(input_file)
+ z = bufr_obj.get_data(DATASET_INFO)
+ assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name']
+ assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon']
+ assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size']
+
+ @staticmethod
+ def test_attributes_with_area_definition(input_file):
+ """Test correctness of dataset attributes with data loaded with a AreaDefinition."""
+ bufr_obj = SeviriL2BufrData(input_file, with_adef=True)
+ _ = bufr_obj.get_data(DATASET_INFO_LAT) # We need to load the lat/lon data in order to
+ _ = bufr_obj.get_data(DATASET_INFO_LON) # populate the file handler with these data
+ z = bufr_obj.get_data(DATASET_INFO)
+ assert z.attrs['platform_name'] == DATASET_ATTRS['platform_name']
+ assert z.attrs['ssp_lon'] == DATASET_ATTRS['ssp_lon']
+ assert z.attrs['seg_size'] == DATASET_ATTRS['seg_size']
+
+ @staticmethod
+ def test_data_with_swath_definition(input_file):
+ """Test data loaded with SwathDefinition (default behaviour)."""
+ bufr_obj = SeviriL2BufrData(input_file)
+ with pytest.raises(NotImplementedError):
+ bufr_obj.fh.get_area_def(None)
+
+ # concatenate original test arrays as get_dataset will have read and concatenated the data
+ x1 = np.concatenate((DATA, DATA), axis=0)
+ z = bufr_obj.get_data(DATASET_INFO)
+ np.testing.assert_array_equal(z.values, x1)
+
+ def test_data_with_area_definition(self, input_file):
+ """Test data loaded with AreaDefinition."""
+ bufr_obj = SeviriL2BufrData(input_file, with_adef=True)
+ _ = bufr_obj.get_data(DATASET_INFO_LAT) # We need to load the lat/lon data in order to
+ _ = bufr_obj.get_data(DATASET_INFO_LON) # populate the file handler with these data
+ z = bufr_obj.get_data(DATASET_INFO)
+
+ ad = bufr_obj.fh.get_area_def(None)
+ assert ad == AREA_DEF
+ data_1d = np.concatenate((DATA, DATA), axis=0)
+
+ # Put the BUFR data on the 2D grid to which the 2D array returned by get_dataset should correspond
+ lons_1d, lats_1d = da.compute(bufr_obj.fh.longitude, bufr_obj.fh.latitude)
+ icol, irow = ad.get_array_indices_from_lonlat(lons_1d, lats_1d)
+
+ data_2d = np.empty(ad.shape)
+ data_2d[:] = np.nan
+ data_2d[irow.compressed(), icol.compressed()] = data_1d[~irow.mask]
+ np.testing.assert_array_equal(z.values, data_2d)
+
+ # Test that the correct AreaDefinition is identified for products with 3-pixel segments
+ bufr_obj.fh.seg_size = 3
+ ad_ext = bufr_obj.fh._construct_area_def(make_dataid(name='dummy', resolution=9000))
+ assert ad_ext == AREA_DEF_EXT
+
+ def test_data_with_rect_lon(self, input_file):
+ """Test data loaded with AreaDefinition and user defined rectification longitude."""
+ bufr_obj = SeviriL2BufrData(input_file, with_adef=True, rect_lon=0.0)
+ np.testing.assert_equal(bufr_obj.fh.ssp_lon, 0.0)
+ _ = bufr_obj.get_data(DATASET_INFO_LAT) # We need to load the lat/lon data in order to
+ _ = bufr_obj.get_data(DATASET_INFO_LON) # populate the file handler with these data
+ _ = bufr_obj.get_data(DATASET_INFO) # We need to load the data in order to create the AreaDefinition
+
+ ad = bufr_obj.fh.get_area_def(None)
+ assert ad == AREA_DEF_FES
+
+
+class SeviriL2AMVBufrData:
+ """Mock SEVIRI L2 AMV BUFR data."""
+
+ @unittest.skipIf(sys.platform.startswith('win'), "'eccodes' not supported on Windows")
+ def __init__(self, filename):
+ """Initialize by mocking test data for testing the SEVIRI L2 BUFR reader."""
+ from satpy.readers.seviri_l2_bufr import SeviriL2BufrFileHandler
+
+ with mock.patch('satpy.readers.seviri_l2_bufr.np.fromfile'):
+ self.fh = SeviriL2BufrFileHandler(filename, FILENAME_INFO2,
+ filetype_info={'file_type': 'seviri_l2_bufr_amv'},
+ with_area_definition=True)
+
+
+class TestSeviriL2AMVBufrReader:
+ """Test SEVIRI L2 BUFR Reader for AMV data."""
+
+ @staticmethod
+ def test_amv_with_area_def():
+ """Test that AMV data can not be loaded with an area definition."""
+ bufr_obj = SeviriL2AMVBufrData('AMVBUFRProd_20201110124500Z_00_OMPEFS04_MET11_FES_E0000')
+ assert bufr_obj.fh.with_adef is False
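From the user side, the new `with_area_definition` and `rectification_longitude` options exercised above would typically be passed through `Scene`'s `reader_kwargs`. A hedged usage sketch — the filename is one of the test names above and the dataset name comes from the test's DATASET_INFO; real products expose their own dataset names:

from satpy import Scene

scn = Scene(
    filenames=['ASRBUFRProd_20191106130000Z_00_OMPEFS01_MET08_FES_E0000'],
    reader='seviri_l2_bufr',
    reader_kwargs={'with_area_definition': True,
                   'rectification_longitude': 0.0})
scn.load(['testdata'])  # data is gridded onto a fixed AreaDefinition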
diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py
index 2e8cf915ed..837dbcdc06 100644
--- a/satpy/tests/reader_tests/test_seviri_l2_grib.py
+++ b/satpy/tests/reader_tests/test_seviri_l2_grib.py
@@ -18,16 +18,18 @@
"""SEVIRI L2 GRIB-reader test package."""
-import sys
-import numpy as np
import datetime
-
+import sys
import unittest
from unittest import mock
+import numpy as np
+
+from satpy.tests.utils import make_dataid
+
# Dictionary to be used as fake GRIB message
FAKE_MESSAGE = {
- 'longitudeOfSubSatellitePointInDegrees': 10.0,
+ 'longitudeOfSubSatellitePointInDegrees': 9.5,
'dataDate': 20191020,
'dataTime': 1745,
'Nx': 1000,
@@ -62,8 +64,7 @@ def setUp(self, ec_):
def test_data_reading(self, da_, xr_):
"""Test the reading of data from the product."""
from satpy import CHUNK_SIZE
- from satpy.readers.seviri_l2_grib import (SeviriL2GribFileHandler,
- REPEAT_CYCLE_DURATION)
+ from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler
with mock.patch("builtins.open", mock.mock_open()) as mock_file:
with mock.patch('satpy.readers.seviri_l2_grib.ec', self.ec_):
self.reader = SeviriL2GribFileHandler(
@@ -76,12 +77,38 @@ def test_data_reading(self, da_, xr_):
filetype_info={}
)
- # Checks the correct file open call
- mock_file.assert_called_with('test.grib', 'rb')
+ dataset_id = make_dataid(name='dummy', resolution=3000)
# Checks that the codes_grib_multi_support_on function has been called
self.ec_.codes_grib_multi_support_on.assert_called()
+ # Restarts the id generator and clears the call history
+ fake_gid_generator = (i for i in FAKE_GID)
+ self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
+ self.ec_.codes_grib_new_from_file.reset_mock()
+ self.ec_.codes_release.reset_mock()
+
+ # Checks the correct execution of the get_dataset function with a valid parameter_number
+ valid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 30})
+ # Checks the correct file open call
+ mock_file.assert_called_with('test.grib', 'rb')
+ # Checks that the dataset has been created as a DataArray object
+ self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()')
+ # Checks that codes_release has been called after each codes_grib_new_from_file call
+ # (except after the last one which has returned a None)
+ self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
+ self.ec_.codes_release.call_count + 1)
+
+ # Restarts the id generator and clears the call history
+ fake_gid_generator = (i for i in FAKE_GID)
+ self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
+ self.ec_.codes_grib_new_from_file.reset_mock()
+ self.ec_.codes_release.reset_mock()
+
+ # Checks the correct execution of the get_dataset function with an invalid parameter_number
+ invalid_dataset = self.reader.get_dataset(dataset_id, {'parameter_number': 50})
+ # Checks that the function returns None
+ self.assertEqual(invalid_dataset, None)
# Checks that codes_release has been called after each codes_grib_new_from_file call
# (except after the last one which has returned a None)
self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
@@ -91,15 +118,15 @@ def test_data_reading(self, da_, xr_):
self.assertEqual(REPEAT_CYCLE_DURATION, 15)
# Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions
- global_attributes = self.reader._get_global_attributes()
- expected_global_attributes = {
+ attributes = self.reader._get_attributes()
+ expected_attributes = {
'orbital_parameters': {
- 'projection_longitude': 10.
+ 'projection_longitude': 9.5
},
'sensor': 'seviri',
'platform_name': 'Meteosat-11'
}
- self.assertEqual(global_attributes, expected_global_attributes)
+ self.assertEqual(attributes, expected_attributes)
# Checks the reading of an array from the message
self.reader._get_xarray_from_msg(0)
@@ -120,16 +147,16 @@ def test_data_reading(self, da_, xr_):
'a': 6400000.,
'b': 6300000.,
'h': 32000000.,
- 'ssp_lon': 10.,
+ 'ssp_lon': 9.5,
'nlines': 1000,
'ncols': 1200,
- 'a_name': 'geos_seviri',
- 'a_desc': 'Calculated area for SEVIRI L2 GRIB product',
- 'p_id': 'geos',
+ 'a_name': 'msg_seviri_rss_3km',
+ 'a_desc': 'MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution',
+ 'p_id': '',
}
self.assertEqual(pdict, expected_pdict)
expected_area_dict = {
- 'center_point': 500.5,
+ 'center_point': 500,
'north': 1200,
'east': 1,
'west': 1000,
@@ -143,39 +170,12 @@ def test_data_reading(self, da_, xr_):
with mock.patch('satpy.readers.seviri_l2_grib.get_area_definition', mock.Mock()) as gad:
self.reader.get_area_def(mock.Mock(resolution=400.))
# Asserts that calculate_area_extent has been called with the correct arguments
+ expected_args = ({'center_point': 500, 'east': 1, 'west': 1000, 'south': 1, 'north': 1200,
+ 'column_step': 400., 'line_step': 400.},)
name, args, kwargs = cae.mock_calls[0]
- self.assertEqual(args[0]['resolution'], 400.)
+ self.assertEqual(args, expected_args)
# Asserts that get_area_definition has been called with the correct arguments
name, args, kwargs = gad.mock_calls[0]
self.assertEqual(args[0], expected_pdict)
# The second argument must be the return result of calculate_area_extent
self.assertEqual(args[1]._extract_mock_name(), 'calculate_area_extent()')
-
- # Restarts the id generator and clears the call history
- fake_gid_generator = (i for i in FAKE_GID)
- self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
- self.ec_.codes_grib_new_from_file.reset_mock()
- self.ec_.codes_release.reset_mock()
-
- # Checks the correct execution of the get_dataset function with a valid parameter_number
- valid_dataset = self.reader.get_dataset(None, {'parameter_number': 30})
- # Checks that the dataset has been created as a DataArray object
- self.assertEqual(valid_dataset._extract_mock_name(), 'xr.DataArray()')
- # Checks that codes_release has been called after each codes_grib_new_from_file call
- self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
- self.ec_.codes_release.call_count)
-
- # Restarts the id generator and clears the call history
- fake_gid_generator = (i for i in FAKE_GID)
- self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
- self.ec_.codes_grib_new_from_file.reset_mock()
- self.ec_.codes_release.reset_mock()
-
- # Checks the correct execution of the get_dataset function with an invalid parameter_number
- invalid_dataset = self.reader.get_dataset(None, {'parameter_number': 50})
- # Checks that the function returns None
- self.assertEqual(invalid_dataset, None)
- # Checks that codes_release has been called after each codes_grib_new_from_file call
- # (except after the last one which has returned a None)
- self.assertEqual(self.ec_.codes_grib_new_from_file.call_count,
- self.ec_.codes_release.call_count + 1)
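The restart-the-generator trick above is worth calling out: binding a fresh generator to `side_effect` makes the mocked `codes_grib_new_from_file` replay a fixed sequence of message ids, ending with `None` the way the real eccodes call does at end of file. A standalone sketch of the pattern (gid values illustrative):

import unittest.mock as mock

FAKE_GID = [0, 1, 2, 3, None]

ec_ = mock.Mock()
fake_gid_generator = (i for i in FAKE_GID)
ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)

assert [ec_.codes_grib_new_from_file('fh') for _ in range(5)] == FAKE_GID

# Re-arm before the next scenario, exactly as the test does; the lambda
# looks the name up at call time, so rebinding it resets the sequence.
fake_gid_generator = (i for i in FAKE_GID)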
diff --git a/satpy/tests/reader_tests/test_slstr_l1b.py b/satpy/tests/reader_tests/test_slstr_l1b.py
index 20e66aec2d..9f516b4cde 100644
--- a/satpy/tests/reader_tests/test_slstr_l1b.py
+++ b/satpy/tests/reader_tests/test_slstr_l1b.py
@@ -20,10 +20,12 @@
import unittest.mock as mock
import warnings
from datetime import datetime
+
import numpy as np
import xarray as xr
-from satpy.dataset.dataid import WavelengthRange, ModifierTuple, DataID
-from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRGeo, NCSLSTRAngles, NCSLSTRFlag
+
+from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange
+from satpy.readers.slstr_l1b import NCSLSTR1B, NCSLSTRAngles, NCSLSTRFlag, NCSLSTRGeo
local_id_keys_config = {'name': {
'required': True,
@@ -93,6 +95,15 @@ def setUp(self, xr_):
'S9_BT_ao': self.rad,
'foo_radiance_an': self.rad,
'S5_solar_irradiances': self.rad,
+ 'geometry_tn': self.rad,
+ 'latitude_an': self.rad,
+ 'x_tx': self.rad,
+ 'y_tx': self.rad,
+ 'x_in': self.rad,
+ 'y_in': self.rad,
+ 'x_an': self.rad,
+ 'y_an': self.rad,
+ 'flags_an': self.rad,
'detector_an': det,
},
attrs={
@@ -110,9 +121,19 @@ def make_dataid(**items):
class TestSLSTRReader(TestSLSTRL1B):
"""Test various nc_slstr file handlers."""
+ class FakeSpl:
+ """Fake return function for SPL interpolation."""
+
+ @staticmethod
+ def ev(foo_x, foo_y):
+ """Fake function to return interpolated data."""
+ return np.zeros((3, 2))
+
@mock.patch('satpy.readers.slstr_l1b.xr')
- def test_instantiate(self, xr_):
+ @mock.patch('scipy.interpolate.RectBivariateSpline')
+ def test_instantiate(self, bvs_, xr_):
"""Test initialization of file handlers."""
+ bvs_.return_value = self.FakeSpl
xr_.open_dataset.return_value = self.fake_dataset
good_start = datetime.strptime(self.start_time,
@@ -122,6 +143,8 @@ def test_instantiate(self, xr_):
ds_id = make_dataid(name='foo', calibration='radiance',
stripe='a', view='nadir')
+ ds_id_500 = make_dataid(name='foo', calibration='radiance',
+ stripe='a', view='nadir', resolution=500)
filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo',
'start_time': 0, 'end_time': 0,
'stripe': 'a', 'view': 'n'}
@@ -149,28 +172,29 @@ def test_instantiate(self, xr_):
filename_info = {'mission_id': 'S3A', 'dataset_name': 'foo',
'start_time': 0, 'end_time': 0,
'stripe': 'a', 'view': 'n'}
- test = NCSLSTRGeo('somedir/S1_radiance_an.nc', filename_info, 'c')
- test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'foo'}))
+ test = NCSLSTRGeo('somedir/geometry_an.nc', filename_info, 'c')
+ test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'latitude_{stripe:1s}{view:1s}'}))
self.assertEqual(test.start_time, good_start)
self.assertEqual(test.end_time, good_end)
xr_.open_dataset.assert_called()
xr_.open_dataset.reset_mock()
- test = NCSLSTRAngles('somedir/S1_radiance_an.nc', filename_info, 'c')
- # TODO: Make this test work
- # test.get_dataset(ds_id, filename_info)
+ test = NCSLSTRFlag('somedir/S1_radiance_an.nc', filename_info, 'c')
+ test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'flags_{stripe:1s}{view:1s}'}))
+ assert test.view == 'nadir'
+ assert test.stripe == 'a'
self.assertEqual(test.start_time, good_start)
self.assertEqual(test.end_time, good_end)
xr_.open_dataset.assert_called()
xr_.open_dataset.reset_mock()
- test = NCSLSTRFlag('somedir/S1_radiance_an.nc', filename_info, 'c')
- assert test.view == 'nadir'
- assert test.stripe == 'a'
+ test = NCSLSTRAngles('somedir/S1_radiance_an.nc', filename_info, 'c')
+ test.get_dataset(ds_id, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'}))
self.assertEqual(test.start_time, good_start)
self.assertEqual(test.end_time, good_end)
xr_.open_dataset.assert_called()
xr_.open_dataset.reset_mock()
+ test.get_dataset(ds_id_500, dict(filename_info, **{'file_key': 'geometry_t{view:1s}'}))
class TestSLSTRCalibration(TestSLSTRL1B):
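For context on the `FakeSpl` stub above: `NCSLSTRAngles` interpolates the coarse tie-point geometry grid onto the full image grid, presumably via `scipy.interpolate.RectBivariateSpline`, whose `ev` method the stub replaces with zeros. A minimal sketch of that interpolation idea (grid sizes made up):

import numpy as np
from scipy.interpolate import RectBivariateSpline

# Coarse tie-point grid, e.g. angles sampled every few km ...
tie_y = np.linspace(0.0, 1.0, 4)
tie_x = np.linspace(0.0, 1.0, 5)
tie_vals = np.arange(20.0).reshape(4, 5)

spl = RectBivariateSpline(tie_y, tie_x, tie_vals, kx=1, ky=1)

# ... evaluated on the full-resolution pixel grid.
full_y = np.linspace(0.0, 1.0, 40)
full_x = np.linspace(0.0, 1.0, 50)
full_vals = spl(full_y, full_x)             # shape (40, 50)
scattered = spl.ev(full_y[:3], full_x[:3])  # what FakeSpl.ev stands in for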
diff --git a/satpy/tests/reader_tests/test_slstr_l2.py b/satpy/tests/reader_tests/test_slstr_l2.py
deleted file mode 100644
index 290e217eb2..0000000000
--- a/satpy/tests/reader_tests/test_slstr_l2.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-# Copyright (c) 2018 Satpy developers
-#
-# This file is part of satpy.
-#
-# satpy is free software: you can redistribute it and/or modify it under the
-# terms of the GNU General Public License as published by the Free Software
-# Foundation, either version 3 of the License, or (at your option) any later
-# version.
-#
-# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
-# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
-# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along with
-# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""Module for testing the satpy.readers.slstr_l2 module."""
-
-import unittest
-from unittest import mock
-from unittest.mock import MagicMock
-from unittest.mock import patch
-import xarray as xr
-from satpy.readers.slstr_l2 import SLSTRL2FileHandler
-
-
-class TestSLSTRL2Reader(unittest.TestCase):
- """Test Sentinel-3 SST L2 reader."""
-
- @mock.patch('xarray.open_dataset')
- def test_instantiate(self, mocked_dataset):
- """Test initialization of file handlers."""
- filename_info = {}
- tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z')
- tmp.rename.return_value = tmp
- xr.open_dataset.return_value = tmp
- SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None)
- mocked_dataset.assert_called()
- mocked_dataset.reset_mock()
-
- with patch('tarfile.open') as tf:
- tf.return_value.__enter__.return_value = MagicMock(getnames=lambda *a: ["GHRSST-SSTskin.nc"])
- SLSTRL2FileHandler('somedir/somefile.tar', filename_info, None)
- mocked_dataset.assert_called()
- mocked_dataset.reset_mock()
-
- @mock.patch('xarray.open_dataset')
- def test_get_dataset(self, mocked_dataset):
- """Test retrieval of datasets."""
- filename_info = {}
- tmp = MagicMock(start_time='20191120T125002Z', stop_time='20191120T125002Z')
- tmp.rename.return_value = tmp
- xr.open_dataset.return_value = tmp
- test = SLSTRL2FileHandler('somedir/somefile.nc', filename_info, None)
- test.nc = {'longitude': xr.Dataset(),
- 'latitude': xr.Dataset(),
- 'sea_surface_temperature': xr.Dataset(),
- 'sea_ice_fraction': xr.Dataset(),
- }
- test.get_dataset('longitude', {'standard_name': 'longitude'})
- test.get_dataset('latitude', {'standard_name': 'latitude'})
- test.get_dataset('sea_surface_temperature', {'standard_name': 'sea_surface_temperature'})
- test.get_dataset('sea_ice_fraction', {'standard_name': 'sea_ice_fraction'})
- with self.assertRaises(KeyError):
- test.get_dataset('erroneous dataset', {'standard_name': 'erroneous dataset'})
- mocked_dataset.assert_called()
- mocked_dataset.reset_mock()
diff --git a/satpy/tests/reader_tests/test_smos_l2_wind.py b/satpy/tests/reader_tests/test_smos_l2_wind.py
index c17a1a9702..731cd64181 100644
--- a/satpy/tests/reader_tests/test_smos_l2_wind.py
+++ b/satpy/tests/reader_tests/test_smos_l2_wind.py
@@ -20,10 +20,12 @@
import os
import unittest
+from datetime import datetime
+from unittest import mock
+
import numpy as np
import xarray as xr
-from unittest import mock
-from datetime import datetime
+
from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
@@ -163,6 +165,7 @@ def test_load_lon(self):
def test_adjust_lon(self):
"""Load adjust longitude dataset."""
from xarray import DataArray
+
from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler
smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc',
{}, filetype_info={'file_type': 'smos_l2_wind'})
@@ -176,6 +179,7 @@ def test_adjust_lon(self):
def test_roll_dataset(self):
"""Load roll of dataset along the lon coordinate."""
from xarray import DataArray
+
from satpy.readers.smos_l2_wind import SMOSL2WINDFileHandler
smos_l2_wind_fh = SMOSL2WINDFileHandler('SM_OPER_MIR_SCNFSW_20200420T021649_20200420T035013_110_001_7.nc',
{}, filetype_info={'file_type': 'smos_l2_wind'})
diff --git a/satpy/tests/reader_tests/test_tropomi_l2.py b/satpy/tests/reader_tests/test_tropomi_l2.py
index baff02ca38..5b7ec3f2b6 100644
--- a/satpy/tests/reader_tests/test_tropomi_l2.py
+++ b/satpy/tests/reader_tests/test_tropomi_l2.py
@@ -20,12 +20,13 @@
import os
import unittest
-from unittest import mock
from datetime import datetime
+from unittest import mock
+
import numpy as np
-from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
import xarray as xr
+from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (3246, 450)
@@ -40,7 +41,6 @@ class FakeNetCDF4FileHandlerTL2(FakeNetCDF4FileHandler):
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
- from xarray import DataArray
dt_s = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0))
dt_e = filename_info.get('end_time', datetime(2016, 1, 1, 12, 0, 0))
@@ -67,15 +67,7 @@ def get_test_content(self, filename, filename_info, filetype_info):
continue
file_content[k + '/shape'] = DEFAULT_FILE_SHAPE
- # convert to xarrays
- for key, val in file_content.items():
- if isinstance(val, np.ndarray):
- if 1 < val.ndim <= 2:
- file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel'))
- elif val.ndim > 2:
- file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner'))
- else:
- file_content[key] = DataArray(val)
+ self._convert_data_content_to_dataarrays(file_content)
file_content['PRODUCT/latitude'].attrs['_FillValue'] = -999.0
file_content['PRODUCT/longitude'].attrs['_FillValue'] = -999.0
file_content['PRODUCT/SUPPORT_DATA/GEOLOCATIONS/latitude_bounds'].attrs['_FillValue'] = -999.0
@@ -91,6 +83,18 @@ def get_test_content(self, filename, filename_info, filetype_info):
return file_content
+ def _convert_data_content_to_dataarrays(self, file_content):
+ """Convert data content to xarray's dataarrays."""
+ from xarray import DataArray
+ for key, val in file_content.items():
+ if isinstance(val, np.ndarray):
+ if 1 < val.ndim <= 2:
+ file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel'))
+ elif val.ndim > 2:
+ file_content[key] = DataArray(val, dims=('scanline', 'ground_pixel', 'corner'))
+ else:
+ file_content[key] = DataArray(val)
+
class TestTROPOMIL2Reader(unittest.TestCase):
"""Test TROPOMI L2 Reader."""
diff --git a/satpy/tests/reader_tests/test_utils.py b/satpy/tests/reader_tests/test_utils.py
index 0a7a605c4d..9df70d8a37 100644
--- a/satpy/tests/reader_tests/test_utils.py
+++ b/satpy/tests/reader_tests/test_utils.py
@@ -26,10 +26,14 @@
import numpy as np
import numpy.testing
import pyresample.geometry
+import pytest
import xarray as xr
-from satpy.readers import utils as hf
+from fsspec.implementations.memory import MemoryFile, MemoryFileSystem
from pyproj import CRS
+from satpy.readers import FSFile
+from satpy.readers import utils as hf
+
class TestHelpers(unittest.TestCase):
"""Test the area helpers."""
@@ -263,7 +267,7 @@ def test_reduce_mda(self):
@mock.patch('satpy.readers.utils.bz2.BZ2File')
@mock.patch('satpy.readers.utils.Popen')
- def test_unzip_file_pbzip2(self, mock_popen, mock_bz2):
+ def test_unzip_file(self, mock_popen, mock_bz2):
"""Test the bz2 file unzipping techniques."""
process_mock = mock.Mock()
attrs = {'communicate.return_value': (b'output', b'error'),
@@ -277,20 +281,24 @@ def test_unzip_file_pbzip2(self, mock_popen, mock_bz2):
filename = 'tester.DAT.bz2'
whichstr = 'satpy.readers.utils.which'
- # no bz2 installed
+ segment = 3
+ segmentstr = str(segment).zfill(2)
+ # no pbzip2 installed with prefix
with mock.patch(whichstr) as whichmock:
whichmock.return_value = None
- new_fname = hf.unzip_file(filename)
+ new_fname = hf.unzip_file(filename, prefix=segmentstr)
self.assertTrue(bz2_mock.read.called)
self.assertTrue(os.path.exists(new_fname))
+ self.assertEqual(os.path.split(new_fname)[1][0:2], segmentstr)
if os.path.exists(new_fname):
os.remove(new_fname)
- # bz2 installed
+ # pbzip2 installed without prefix
with mock.patch(whichstr) as whichmock:
whichmock.return_value = '/usr/bin/pbzip2'
new_fname = hf.unzip_file(filename)
self.assertTrue(mock_popen.called)
self.assertTrue(os.path.exists(new_fname))
+ self.assertNotEqual(os.path.split(new_fname)[1][0:2], segmentstr)
if os.path.exists(new_fname):
os.remove(new_fname)
@@ -298,6 +306,99 @@ def test_unzip_file_pbzip2(self, mock_popen, mock_bz2):
new_fname = hf.unzip_file(filename)
self.assertIsNone(new_fname)
+ @mock.patch('bz2.BZ2File')
+ def test_generic_open_BZ2File(self, bz2_mock):
+ """Test the generic_open method with bz2 filename input."""
+ mock_bz2_open = mock.MagicMock()
+ mock_bz2_open.read.return_value = b'TEST'
+ bz2_mock.return_value = mock_bz2_open
+
+ filename = 'tester.DAT.bz2'
+ with hf.generic_open(filename) as file_object:
+ data = file_object.read()
+ assert data == b'TEST'
+
+ assert mock_bz2_open.read.called
+
+ def test_generic_open_FSFile_MemoryFileSystem(self):
+ """Test the generic_open method with FSFile in MemoryFileSystem."""
+ mem_fs = MemoryFileSystem()
+ mem_file = MemoryFile(fs=mem_fs, path="{}test.DAT".format(mem_fs.root_marker), data=b"TEST")
+ mem_file.commit()
+ fsf = FSFile(mem_file)
+ with hf.generic_open(fsf) as file_object:
+ data = file_object.read()
+ assert data == b'TEST'
+
+ @mock.patch('satpy.readers.utils.open')
+ def test_generic_open_filename(self, open_mock):
+ """Test the generic_open method with filename (str)."""
+ mock_fn_open = mock.MagicMock()
+ mock_fn_open.read.return_value = b'TEST'
+ open_mock.return_value = mock_fn_open
+
+ filename = "test.DAT"
+ with hf.generic_open(filename) as file_object:
+ data = file_object.read()
+ assert data == b'TEST'
+
+ assert mock_fn_open.read.called
+
+ def test_generic_open_text(self):
+ """Test the bz2 file unzipping context manager using dummy text data."""
+ dummy_text_data = 'Hello'
+ dummy_text_filename = 'dummy.txt'
+ with open(dummy_text_filename, 'w') as f:
+ f.write(dummy_text_data)
+
+ with hf.generic_open(dummy_text_filename, 'r') as f:
+ read_text_data = f.read()
+
+ assert read_text_data == dummy_text_data
+
+ dummy_text_filename = 'dummy.txt.bz2'
+ with hf.bz2.open(dummy_text_filename, 'wt') as f:
+ f.write(dummy_text_data)
+
+ with hf.generic_open(dummy_text_filename, 'rt') as f:
+ read_text_data = f.read()
+
+ assert read_text_data == dummy_text_data
+
+ def test_generic_open_binary(self):
+ """Test the bz2 file unzipping context manager using dummy binary data."""
+ dummy_binary_data = b'Hello'
+ dummy_binary_filename = 'dummy.dat'
+ with open(dummy_binary_filename, 'wb') as f:
+ f.write(dummy_binary_data)
+
+ with hf.generic_open(dummy_binary_filename, 'rb') as f:
+ read_binary_data = f.read()
+
+ assert read_binary_data == dummy_binary_data
+
+ dummy_binary_filename = 'dummy.dat.bz2'
+ with hf.bz2.open(dummy_binary_filename, 'wb') as f:
+ f.write(dummy_binary_data)
+
+ with hf.generic_open(dummy_binary_filename, 'rb') as f:
+ read_binary_data = f.read()
+
+ assert read_binary_data == dummy_binary_data
+
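Taken together, the tests above cover three input flavours for `generic_open`: bz2-compressed paths, plain paths, and `FSFile` objects. A simplified dispatch covering just the string-path cases (the real `satpy.readers.utils.generic_open` also handles `FSFile` and may differ in detail):

import bz2
import os
from contextlib import contextmanager

@contextmanager
def generic_open_sketch(filename, mode='rb'):
    """Open a path transparently, decompressing .bz2 files on the fly."""
    if os.fspath(filename).endswith('.bz2'):
        fileobj = bz2.open(filename, mode)
    else:
        fileobj = open(filename, mode)
    try:
        yield fileobj
    finally:
        fileobj.close()

# Usage mirrors the tests above:
# with generic_open_sketch('dummy.dat.bz2') as f:
#     data = f.read()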
+ @mock.patch("os.remove")
+ @mock.patch("satpy.readers.utils.unzip_file", return_value='dummy.txt')
+ def test_pro_reading_gets_unzipped_file(self, fake_unzip_file, fake_remove):
+ """Test the bz2 file unzipping context manager."""
+ filename = 'dummy.txt.bz2'
+ expected_filename = filename[:-4]
+
+ with hf.unzip_context(filename) as new_filename:
+ self.assertEqual(new_filename, expected_filename)
+
+ fake_unzip_file.assert_called_with(filename)
+ fake_remove.assert_called_with(expected_filename)
+
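Taken together, the new tests pin down the intended usage of the two context managers; a minimal sketch (the file name and the process() consumer are hypothetical):

from satpy.readers import utils as hf

# generic_open transparently yields a file object for plain or bz2-compressed
# input, honouring text ('r'/'rt') and binary ('rb') modes.
with hf.generic_open('granule.DAT.bz2', 'rb') as f:
    payload = f.read()

# unzip_context decompresses to a temporary file, yields its name, and
# removes the temporary file again on exit.
with hf.unzip_context('granule.DAT.bz2') as tmp_name:
    process(tmp_name)  # hypothetical consumer of the unzipped file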
def test_apply_rad_correction(self):
"""Test radiance correction technique using user-supplied coefs."""
slope = 0.5
@@ -327,10 +428,10 @@ def test_get_user_calibration_factors(self):
hf.get_user_calibration_factors('IR108', radcor_dict)
-class TestSunEarthDistanceCorrection(unittest.TestCase):
+class TestSunEarthDistanceCorrection:
"""Tests for applying Sun-Earth distance correction to reflectance."""
- def setUp(self):
+ def setup_method(self):
"""Create input / output arrays for the tests."""
self.test_date = datetime(2020, 8, 15, 13, 0, 40)
@@ -338,11 +439,13 @@ def setUp(self):
attrs={'start_time': self.test_date,
'scheduled_time': self.test_date})
- corr_refl = xr.DataArray(da.from_array([10.50514689, 21.01029379,
- 42.02058758, 1.05051469,
- 102.95043957, 52.52573447]),
- attrs={'start_time': self.test_date,
- 'scheduled_time': self.test_date})
+ corr_refl = xr.DataArray(da.from_array([
+ 10.25484833, 20.50969667,
+ 41.01939333, 1.02548483,
+ 100.49751367, 51.27424167]),
+ attrs={'start_time': self.test_date,
+ 'scheduled_time': self.test_date},
+ )
self.raw_refl = raw_refl
self.corr_refl = corr_refl
@@ -352,37 +455,37 @@ def test_get_utc_time(self):
tmp_array = self.raw_refl.copy()
del tmp_array.attrs['scheduled_time']
utc_time = hf.get_array_date(tmp_array, None)
- self.assertEqual(utc_time, self.test_date)
+ assert utc_time == self.test_date
# Now check correct time is returned with 'scheduled_time'
tmp_array = self.raw_refl.copy()
del tmp_array.attrs['start_time']
utc_time = hf.get_array_date(tmp_array, None)
- self.assertEqual(utc_time, self.test_date)
+ assert utc_time == self.test_date
# Now check correct time is returned with utc_date passed
tmp_array = self.raw_refl.copy()
new_test_date = datetime(2019, 2, 1, 15, 2, 12)
utc_time = hf.get_array_date(tmp_array, new_test_date)
- self.assertEqual(utc_time, new_test_date)
+ assert utc_time == new_test_date
# Finally, ensure error is raised if no datetime is available
tmp_array = self.raw_refl.copy()
del tmp_array.attrs['scheduled_time']
del tmp_array.attrs['start_time']
- with self.assertRaises(KeyError):
+ with pytest.raises(KeyError):
hf.get_array_date(tmp_array, None)
def test_apply_sunearth_corr(self):
"""Test the correction of reflectances with sun-earth distance."""
out_refl = hf.apply_earthsun_distance_correction(self.raw_refl)
np.testing.assert_allclose(out_refl, self.corr_refl)
- self.assertTrue(out_refl.attrs['sun_earth_distance_correction_applied'])
+ assert out_refl.attrs['sun_earth_distance_correction_applied']
assert isinstance(out_refl.data, da.Array)
def test_remove_sunearth_corr(self):
"""Test the removal of the sun-earth distance correction."""
out_refl = hf.remove_earthsun_distance_correction(self.corr_refl)
np.testing.assert_allclose(out_refl, self.raw_refl)
- self.assertFalse(out_refl.attrs['sun_earth_distance_correction_applied'])
+ assert not out_refl.attrs['sun_earth_distance_correction_applied']
assert isinstance(out_refl.data, da.Array)
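The updated fixtures above are consistent with scaling the raw reflectances by the square of the sun-earth distance in astronomical units at the acquisition time; a quick check of the arithmetic (the raw reflectances and the implied factor d**2 ~= 1.025485, i.e. d ~= 1.01266 AU for 2020-08-15T13:00:40, are inferred from the fixture values rather than taken from satpy itself):

import numpy as np

raw = np.array([10.0, 20.0, 40.0, 1.0, 98.0, 50.0])  # assumption: implied by the corrected values
factor = 1.025484833  # d**2 implied by the fixtures
np.testing.assert_allclose(
    raw * factor,
    [10.25484833, 20.50969667, 41.01939333, 1.02548483, 100.49751367, 51.27424167],
    rtol=1e-7,
)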
diff --git a/satpy/tests/reader_tests/test_vaisala_gld360.py b/satpy/tests/reader_tests/test_vaisala_gld360.py
index 888c7bf90c..9add08b1d2 100644
--- a/satpy/tests/reader_tests/test_vaisala_gld360.py
+++ b/satpy/tests/reader_tests/test_vaisala_gld360.py
@@ -17,6 +17,7 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittesting the Vaisala GLD360 reader."""
+import unittest
from io import StringIO
import numpy as np
@@ -24,8 +25,6 @@
from satpy.readers.vaisala_gld360 import VaisalaGLD360TextFileHandler
from satpy.tests.utils import make_dataid
-import unittest
-
class TestVaisalaGLD360TextFileHandler(unittest.TestCase):
"""Test the VaisalaGLD360TextFileHandler."""
diff --git a/satpy/tests/reader_tests/test_vii_base_nc.py b/satpy/tests/reader_tests/test_vii_base_nc.py
index a4d1443ecc..c2afc04356 100644
--- a/satpy/tests/reader_tests/test_vii_base_nc.py
+++ b/satpy/tests/reader_tests/test_vii_base_nc.py
@@ -18,22 +18,17 @@
"""The vii_base_nc reader tests package."""
+import datetime
import os
+import unittest
+import uuid
+from unittest import mock
+
import numpy as np
import xarray as xr
-import datetime
from netCDF4 import Dataset
-import uuid
-
-from satpy.readers.vii_base_nc import ViiNCBaseFileHandler, SCAN_ALT_TIE_POINTS, \
- TIE_POINTS_FACTOR
-
-import unittest
-try:
- from unittest import mock
-except ImportError:
- import mock
+from satpy.readers.vii_base_nc import SCAN_ALT_TIE_POINTS, TIE_POINTS_FACTOR, ViiNCBaseFileHandler
TEST_FILE = 'test_file_vii_base_nc.nc'
diff --git a/satpy/tests/reader_tests/test_vii_l1b_nc.py b/satpy/tests/reader_tests/test_vii_l1b_nc.py
index 9d3be6eec3..cf33e7872e 100644
--- a/satpy/tests/reader_tests/test_vii_l1b_nc.py
+++ b/satpy/tests/reader_tests/test_vii_l1b_nc.py
@@ -15,23 +15,26 @@
#
# You should have received a copy of the GNU General Public License
# along with satpy. If not, see <http://www.gnu.org/licenses/>.
+"""The vii_l1b_nc reader tests package.
-"""The vii_l1b_nc reader tests package."""
+This version tests the reader against VII test data V2, as defined in PFS V4A.
+"""
+
+
+import datetime
import os
+import unittest
+import uuid
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
-import datetime
from netCDF4 import Dataset
-import uuid
from satpy.readers.vii_l1b_nc import ViiL1bNCFileHandler
from satpy.readers.vii_utils import MEAN_EARTH_RADIUS
-import unittest
-
TEST_FILE = 'test_file_vii_l1b_nc.nc'
@@ -76,9 +79,9 @@ def setUp(self):
# Add variables to data/measurement_data group
sza = g1_2.createVariable('solar_zenith', np.float32,
- dimensions=('num_tie_points_act', 'num_tie_points_alt'))
+ dimensions=('num_tie_points_alt', 'num_tie_points_act'))
sza[:] = 25.0
- delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_pixels', 'num_lines'))
+ delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels'))
delta_lat[:] = 1.0
self.reader = ViiL1bNCFileHandler(
@@ -117,25 +120,25 @@ def test_calibration_functions(self):
angle_factor = 0.4
isi = 2.0
refl = self.reader._calibrate_refl(radiance, angle_factor, isi)
- expected_refl = np.array([[0.628318531, 1.256637061, 3.141592654],
- [4.398229715, 6.283185307, 12.56637061]])
+ expected_refl = np.array([[62.8318531, 125.6637061, 314.1592654],
+ [439.8229715, 628.3185307, 1256.637061]])
self.assertTrue(np.allclose(refl, expected_refl))
def test_functions(self):
"""Test the functions."""
# Checks that the _perform_orthorectification function is correctly executed
variable = xr.DataArray(
- dims=('num_pixels', 'num_lines'),
+ dims=('num_lines', 'num_pixels'),
name='test_name',
attrs={
'key_1': 'value_1',
'key_2': 'value_2'
},
- data=da.from_array(np.ones((72, 600)))
+ data=da.from_array(np.ones((600, 72)))
)
orthorect_variable = self.reader._perform_orthorectification(variable, 'data/measurement_data/delta_lat')
- expected_values = np.degrees(np.ones((72, 600)) / MEAN_EARTH_RADIUS) + np.ones((72, 600))
+ expected_values = np.degrees(np.ones((600, 72)) / MEAN_EARTH_RADIUS) + np.ones((600, 72))
self.assertTrue(np.allclose(orthorect_variable.values, expected_values))
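The expectation above implies that _perform_orthorectification adds the angular equivalent of the per-pixel displacement to the variable; a sketch of that arithmetic, interpreting delta_lat as a displacement in metres (MEAN_EARTH_RADIUS is imported from satpy.readers.vii_utils at the top of this module):

import numpy as np
from satpy.readers.vii_utils import MEAN_EARTH_RADIUS

data = np.ones((600, 72))
delta_lat = np.ones((600, 72))  # per-pixel displacement from the fake file
orthorectified = data + np.degrees(delta_lat / MEAN_EARTH_RADIUS)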
# Checks that the _perform_calibration function is correctly executed in all cases
@@ -152,7 +155,7 @@ def test_functions(self):
calibrated_variable = self.reader._perform_calibration(variable,
{'calibration': 'brightness_temperature',
'chan_thermal_index': 3})
- expected_values = np.ones((72, 600)) * 302007.42728603
+ expected_values = np.full((600, 72), 1101.10413712)
self.assertTrue(np.allclose(calibrated_variable.values, expected_values))
# reflectance calibration: checks that the return value is correct
@@ -160,5 +163,5 @@ def test_functions(self):
{'calibration': 'reflectance',
'wavelength': [0.658, 0.668, 0.678],
'chan_solar_index': 2})
- expected_values = np.ones((72, 600)) * 1.733181982 * (0.678 - 0.658)
+ expected_values = np.full((600, 72), 173.3181982)
self.assertTrue(np.allclose(calibrated_variable.values, expected_values))
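The new expected reflectances are exactly 100 times the old ones, i.e. the calibrated product is now expressed in percent; the values match refl[%] = 100 * pi * radiance * angle_factor / isi (the radiance matrix below is inferred from the expected values, not shown in this hunk):

import numpy as np

radiance = np.array([[1.0, 2.0, 5.0], [7.0, 10.0, 20.0]])  # assumption: implied inputs
angle_factor = 0.4
isi = 2.0
refl_pct = 100.0 * np.pi * radiance * angle_factor / isi
np.testing.assert_allclose(
    refl_pct,
    [[62.8318531, 125.6637061, 314.1592654],
     [439.8229715, 628.3185307, 1256.637061]],
)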
diff --git a/satpy/tests/reader_tests/test_vii_l2_nc.py b/satpy/tests/reader_tests/test_vii_l2_nc.py
index fc5d616db1..e431d16e73 100644
--- a/satpy/tests/reader_tests/test_vii_l2_nc.py
+++ b/satpy/tests/reader_tests/test_vii_l2_nc.py
@@ -18,19 +18,18 @@
"""The vii_2_nc reader tests package."""
+import datetime
import os
+import unittest
+import uuid
+
+import dask.array as da
import numpy as np
import xarray as xr
-import dask.array as da
-import datetime
from netCDF4 import Dataset
-import uuid
from satpy.readers.vii_l2_nc import ViiL2NCFileHandler
-import unittest
-
-
TEST_FILE = 'test_file_vii_l2_nc.nc'
@@ -48,14 +47,14 @@ def setUp(self):
g1 = nc.createGroup('data')
# Add dimensions to data group
- g1.createDimension('num_pixels', 10)
- g1.createDimension('num_lines', 100)
+ g1.createDimension('num_pixels', 100)
+ g1.createDimension('num_lines', 10)
# Create measurement_data group
g1_2 = g1.createGroup('measurement_data')
# Add variables to data/measurement_data group
- delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_pixels', 'num_lines'))
+ delta_lat = g1_2.createVariable('delta_lat', np.float32, dimensions=('num_lines', 'num_pixels'))
delta_lat[:] = 0.1
self.reader = ViiL2NCFileHandler(
@@ -83,7 +82,7 @@ def test_functions(self):
"""Test the functions."""
# Checks that the _perform_orthorectification function is correctly executed
variable = xr.DataArray(
- dims=('num_pixels', 'num_lines'),
+ dims=('num_lines', 'num_pixels'),
name='test_name',
attrs={
'key_1': 'value_1',
diff --git a/satpy/tests/reader_tests/test_vii_utils.py b/satpy/tests/reader_tests/test_vii_utils.py
index e2ce3cc6ac..ab90833887 100644
--- a/satpy/tests/reader_tests/test_vii_utils.py
+++ b/satpy/tests/reader_tests/test_vii_utils.py
@@ -18,10 +18,9 @@
"""The vii_utils reader tests package."""
-import satpy.readers.vii_utils
-
import unittest
+import satpy.readers.vii_utils
# Constants to be tested
C1 = 1.191062e+8
diff --git a/satpy/tests/reader_tests/test_viirs_compact.py b/satpy/tests/reader_tests/test_viirs_compact.py
index 0afa6c9da1..4ba287ff86 100644
--- a/satpy/tests/reader_tests/test_viirs_compact.py
+++ b/satpy/tests/reader_tests/test_viirs_compact.py
@@ -17,32 +17,827 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module for testing the satpy.readers.viirs_compact module."""
-import os
-import tempfile
-import unittest
from contextlib import suppress
import h5py
import numpy as np
+import pytest
+from satpy.tests.reader_tests.utils import fill_h5
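The conversion below replaces the unittest-style class with pytest fixtures: fake_dnb returns a nested dict describing the granule, which is then written into a real HDF5 file via the fill_h5 helper imported above. A minimal sketch of that pattern (the fake_dnb_file fixture name is hypothetical, and fill_h5's signature is assumed from its usage in this module):

import h5py
import pytest

@pytest.fixture
def fake_dnb_file(tmp_path, fake_dnb):
    """Write the nested fake-granule dict into an HDF5 file and return its path."""
    path = tmp_path / "fake_dnb.h5"
    with h5py.File(path, "w") as h5f:
        fill_h5(h5f, fake_dnb)  # assumption: fill_h5(group, contents) recursively creates groups/datasets/attrs
    return path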
-class TestCompact(unittest.TestCase):
- """Test class for reading compact viirs format."""
- def setUp(self):
- """Create a fake file from scratch."""
- fake_dnb = {
- "All_Data": {
- "ModeGran": {"value": 0},
- "ModeScan": {
+@pytest.fixture
+def fake_dnb():
+ """Create fake DNB content."""
+ fake_dnb = {
+ "All_Data": {
+ "ModeGran": {"value": 0},
+ "ModeScan": {
+ "value": np.array(
+ [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 254,
+ 249,
+ ],
+ dtype=np.uint8,
+ )
+ },
+ "NumberOfScans": {"value": np.array([47])},
+ "VIIRS-DNB-GEO_All": {
+ "AlignmentCoefficient": {
"value": np.array(
[
+ 2.11257413e-02,
+ 2.11152732e-02,
+ 2.11079046e-02,
+ 2.10680142e-02,
+ 1.80840008e-02,
+ 1.80402063e-02,
+ 1.79968309e-02,
+ 1.79477539e-02,
+ 2.20463774e-03,
+ 2.17431062e-03,
+ 2.14360282e-03,
+ 2.11503846e-03,
+ 2.08630669e-03,
+ 2.05924874e-03,
+ 2.03177333e-03,
+ 2.00573727e-03,
+ 1.98072987e-03,
+ 1.95503305e-03,
+ 1.93077011e-03,
+ 1.90702057e-03,
+ 1.88353716e-03,
+ 1.86104013e-03,
+ 1.83863181e-03,
+ 1.81696517e-03,
+ 1.79550308e-03,
+ 1.77481642e-03,
+ 1.75439729e-03,
+ 1.73398503e-03,
+ 1.71459839e-03,
+ 1.69516564e-03,
+ 1.67622324e-03,
+ 1.65758410e-03,
+ 1.63990213e-03,
+ 1.62128301e-03,
+ 1.60375470e-03,
+ 1.58667017e-03,
+ 1.61543000e-03,
+ 1.59775047e-03,
+ 1.50719041e-03,
+ 1.48937735e-03,
+ 1.47257745e-03,
+ 1.50070526e-03,
+ 1.48288533e-03,
+ 9.29064234e-04,
+ 9.12246935e-04,
+ 8.95748264e-04,
+ 8.71886965e-04,
+ 8.55044520e-04,
+ 8.38686305e-04,
+ 8.18263041e-04,
+ 8.01501446e-04,
+ 7.85346841e-04,
+ 1.15984806e-03,
+ 1.14326552e-03,
+ 1.12648588e-03,
+ 1.11018715e-03,
+ 1.09399087e-03,
+ 1.19698711e-03,
+ 1.18051842e-03,
+ 1.16404379e-03,
+ 1.14832399e-03,
+ 9.92591376e-04,
+ 9.75896895e-04,
+ 9.59663419e-04,
+ 9.43415158e-04,
+ 9.27662419e-04,
+ 8.92253709e-04,
+ 8.75947590e-04,
+ 8.60177504e-04,
+ 8.44484195e-04,
+ 8.35279003e-04,
+ 8.19236680e-04,
+ 8.03303672e-04,
+ 7.87482015e-04,
+ 7.60449213e-04,
+ 7.44239136e-04,
+ 7.28625571e-04,
+ 7.12990935e-04,
+ 6.89090986e-04,
+ 6.73000410e-04,
+ 6.57248020e-04,
+ 6.41623745e-04,
+ 6.20219158e-04,
+ 6.04308851e-04,
+ 5.88596100e-04,
+ 5.73108089e-04,
+ 3.65344196e-04,
+ 3.49639275e-04,
+ 3.34273063e-04,
+ 4.81286290e-04,
+ 4.65485587e-04,
+ 4.49862011e-04,
+ 4.34543617e-04,
+ 4.19324206e-04,
+ 2.60536268e-04,
+ 2.45052564e-04,
+ 2.29740850e-04,
+ 2.34466774e-04,
+ 2.18822126e-04,
+ 2.03370175e-04,
+ 1.88058810e-04,
+ 1.60192372e-04,
+ 1.44485937e-04,
+ 1.28920830e-04,
+ 3.45615146e-04,
+ 3.30171984e-04,
+ 3.14682693e-04,
+ 2.99300562e-04,
+ 2.83925037e-04,
+ 2.68518896e-04,
+ 2.53254839e-04,
+ 2.37950648e-04,
+ 2.22716670e-04,
+ 2.07562072e-04,
+ 1.92296386e-04,
+ 1.77147449e-04,
+ 1.61994336e-04,
+ 1.46895778e-04,
+ 1.31844325e-04,
+ 1.16730320e-04,
+ 1.01757469e-04,
+ 8.67861963e-05,
+ 7.18669180e-05,
+ 5.70719567e-05,
+ 4.24701866e-05,
+ 2.84846719e-05,
+ 1.70599415e-05,
+ -1.47213286e-05,
+ -2.33691408e-05,
+ -3.68025649e-05,
+ -5.12388433e-05,
+ -6.59972284e-05,
+ -8.08926561e-05,
+ -9.58433884e-05,
+ -1.10882705e-04,
+ -1.25976600e-04,
+ -1.41044657e-04,
+ -1.56166439e-04,
+ -1.71307023e-04,
+ -1.86516074e-04,
+ -2.01731804e-04,
+ -2.16980450e-04,
+ -2.32271064e-04,
+ -2.47527263e-04,
+ -2.62940506e-04,
+ -2.78283434e-04,
+ -2.93711084e-04,
+ -3.09180934e-04,
+ -3.24661058e-04,
+ -3.40237195e-04,
+ -1.27807143e-04,
+ -1.43646437e-04,
+ -1.59638614e-04,
+ -1.87593061e-04,
+ -2.03169184e-04,
+ -2.18941437e-04,
+ -2.34920750e-04,
+ -2.30605408e-04,
+ -2.46262236e-04,
+ -2.62226094e-04,
+ -4.19838558e-04,
+ -4.35510388e-04,
+ -4.51152271e-04,
+ -4.67120990e-04,
+ -4.83241311e-04,
+ -3.37647041e-04,
+ -3.53568990e-04,
+ -3.69836489e-04,
+ -5.76354389e-04,
+ -5.92070050e-04,
+ -6.08178903e-04,
+ -6.24440494e-04,
+ -6.45648804e-04,
+ -6.61431870e-04,
+ -6.77491073e-04,
+ -6.93967624e-04,
+ -7.17683870e-04,
+ -7.33471534e-04,
+ -7.49999890e-04,
+ -7.66390527e-04,
+ -7.93468382e-04,
+ -8.09502264e-04,
+ -8.25728697e-04,
+ -8.42282083e-04,
+ -8.51265620e-04,
+ -8.67322611e-04,
+ -8.83649045e-04,
+ -9.00280487e-04,
+ -9.35055199e-04,
+ -9.51097580e-04,
+ -9.67527216e-04,
+ -9.84144746e-04,
+ -1.00128003e-03,
+ -1.15522649e-03,
+ -1.17168750e-03,
+ -1.18826574e-03,
+ -1.20496599e-03,
+ -1.10272120e-03,
+ -1.11865194e-03,
+ -1.13539130e-03,
+ -1.15241797e-03,
+ -1.16964686e-03,
+ -7.97322951e-04,
+ -8.14269355e-04,
+ -8.31696263e-04,
+ -8.51555436e-04,
+ -8.68656265e-04,
+ -8.86220601e-04,
+ -9.09406052e-04,
+ -9.26509325e-04,
+ -9.44124535e-04,
+ -1.49479776e-03,
+ -1.51314179e-03,
+ -1.48387800e-03,
+ -1.50146009e-03,
+ -1.51945755e-03,
+ -1.61006744e-03,
+ -1.62846781e-03,
+ -1.59783731e-03,
+ -1.61545863e-03,
+ -1.63336343e-03,
+ -1.65167439e-03,
+ -1.67034590e-03,
+ -1.68956630e-03,
+ -1.70884258e-03,
+ -1.72863202e-03,
+ -1.74859120e-03,
+ -1.76901231e-03,
+ -1.79015659e-03,
+ -1.81144674e-03,
+ -1.83329231e-03,
+ -1.85552111e-03,
+ -1.87840930e-03,
+ -1.90151483e-03,
+ -1.92550803e-03,
+ -1.94982730e-03,
+ -1.97511422e-03,
+ -2.00066133e-03,
+ -2.02709576e-03,
+ -2.05422146e-03,
+ -2.08215159e-03,
+ -2.11093877e-03,
+ -2.14011059e-03,
+ -2.17073411e-03,
+ -2.20196834e-03,
+ -2.23409734e-03,
+ -2.26700748e-03,
+ -2.30150856e-03,
+ -2.33719964e-03,
+ -2.37406371e-03,
+ -2.41223071e-03,
+ -2.45184498e-03,
+ -2.49327719e-03,
+ -2.53651105e-03,
+ -2.58166087e-03,
+ -2.62895599e-03,
+ -2.67871981e-03,
+ -2.73117283e-03,
+ -5.49861044e-03,
+ -5.55437338e-03,
+ -5.61159104e-03,
+ -5.67073002e-03,
+ -5.73173212e-03,
+ -5.79498662e-03,
+ -5.85969677e-03,
+ -5.92768658e-03,
+ -5.99809457e-03,
+ -6.07080618e-03,
+ -6.14715228e-03,
+ -6.22711331e-03,
+ ],
+ dtype=np.float32,
+ )
+ },
+ "ExpansionCoefficient": {
+ "value": np.array(
+ [
+ 1.17600127e-03,
+ 1.17271533e-03,
+ 1.17000856e-03,
+ 1.16674276e-03,
+ 2.11251900e-03,
+ 2.10516527e-03,
+ 2.09726905e-03,
+ 2.08941335e-03,
+ 1.63907595e-02,
+ 1.58577170e-02,
+ 1.53679820e-02,
+ 1.49007449e-02,
+ 1.44708352e-02,
+ 1.40612368e-02,
+ 1.36818690e-02,
+ 1.33193973e-02,
+ 1.29744308e-02,
+ 1.26568424e-02,
+ 1.23488475e-02,
+ 1.20567940e-02,
+ 1.17803067e-02,
+ 1.15150018e-02,
+ 1.12629030e-02,
+ 1.10203745e-02,
+ 1.07905651e-02,
+ 1.05690639e-02,
+ 1.03563424e-02,
+ 1.01526314e-02,
+ 9.95650515e-03,
+ 9.76785459e-03,
+ 9.58597753e-03,
+ 9.41115711e-03,
+ 9.23914276e-03,
+ 9.07964632e-03,
+ 8.92116502e-03,
+ 8.76654685e-03,
+ 9.04925726e-03,
+ 8.88936501e-03,
+ 9.14804544e-03,
+ 8.98920093e-03,
+ 8.83030891e-03,
+ 9.06952657e-03,
+ 8.90891161e-03,
+ 1.36343827e-02,
+ 1.32706892e-02,
+ 1.29242949e-02,
+ 1.36271119e-02,
+ 1.32572902e-02,
+ 1.29025253e-02,
+ 1.35165229e-02,
+ 1.31412474e-02,
+ 1.27808526e-02,
+ 8.91761761e-03,
+ 8.74674786e-03,
+ 8.58181808e-03,
+ 8.42147414e-03,
+ 8.26664641e-03,
+ 7.81304855e-03,
+ 7.67400907e-03,
+ 7.54208490e-03,
+ 7.40892906e-03,
+ 8.81091598e-03,
+ 8.62924196e-03,
+ 8.45206063e-03,
+ 8.28018785e-03,
+ 8.11239891e-03,
+ 8.62185098e-03,
+ 8.43446422e-03,
+ 8.25031102e-03,
+ 8.07087123e-03,
+ 8.30837712e-03,
+ 8.11944436e-03,
+ 7.93648325e-03,
+ 7.75875151e-03,
+ 8.14332347e-03,
+ 7.94676598e-03,
+ 7.75293307e-03,
+ 7.56529858e-03,
+ 7.88933039e-03,
+ 7.68536143e-03,
+ 7.48489471e-03,
+ 7.28917075e-03,
+ 7.55438488e-03,
+ 7.34063145e-03,
+ 7.13229552e-03,
+ 6.92783622e-03,
+ 1.06161544e-02,
+ 1.01234140e-02,
+ 9.64432582e-03,
+ 6.52031973e-03,
+ 6.29310543e-03,
+ 6.06948463e-03,
+ 5.84984245e-03,
+ 5.63343242e-03,
+ 8.61937553e-03,
+ 8.08268972e-03,
+ 7.55874207e-03,
+ 6.79610623e-03,
+ 6.32849289e-03,
+ 5.86955249e-03,
+ 5.41723240e-03,
+ 5.56734810e-03,
+ 5.01116784e-03,
+ 4.46233014e-03,
+ 1.40874484e-03,
+ 1.34475902e-03,
+ 1.28140685e-03,
+ 1.21824886e-03,
+ 1.15505024e-03,
+ 1.09222531e-03,
+ 1.02962845e-03,
+ 9.67168540e-04,
+ 9.04808170e-04,
+ 8.42478999e-04,
+ 7.80681905e-04,
+ 7.18652213e-04,
+ 6.56902499e-04,
+ 5.95146266e-04,
+ 5.33432467e-04,
+ 4.72071581e-04,
+ 4.10460081e-04,
+ 3.49062117e-04,
+ 2.87777104e-04,
+ 2.26464268e-04,
+ 1.65259655e-04,
+ 1.03993290e-04,
+ 4.27830964e-05,
+ -1.84028686e-05,
+ -7.95840388e-05,
+ -1.40780976e-04,
+ -2.01987947e-04,
+ -2.63233029e-04,
+ -3.24499299e-04,
+ -3.85862397e-04,
+ -4.47216793e-04,
+ -5.08567959e-04,
+ -5.70152479e-04,
+ -6.31901203e-04,
+ -6.93684444e-04,
+ -7.55490037e-04,
+ -8.17523745e-04,
+ -8.79664498e-04,
+ -9.41973762e-04,
+ -1.00450485e-03,
+ -1.06710335e-03,
+ -1.12990546e-03,
+ -1.19290419e-03,
+ -1.25615683e-03,
+ -1.31971564e-03,
+ -1.38323894e-03,
+ -4.38789371e-03,
+ -4.93527949e-03,
+ -5.48970094e-03,
+ -5.34658274e-03,
+ -5.79780247e-03,
+ -6.25621388e-03,
+ -6.72366377e-03,
+ -7.48283789e-03,
+ -8.00681766e-03,
+ -8.54192488e-03,
+ -5.58420410e-03,
+ -5.79793099e-03,
+ -6.01683883e-03,
+ -6.23886706e-03,
+ -6.46463828e-03,
+ -9.56355780e-03,
+ -1.00387875e-02,
+ -1.05282217e-02,
+ -6.87109074e-03,
+ -7.07587786e-03,
+ -7.28309387e-03,
+ -7.49528036e-03,
+ -7.23363785e-03,
+ -7.42882164e-03,
+ -7.62982434e-03,
+ -7.83343613e-03,
+ -7.51076965e-03,
+ -7.69859226e-03,
+ -7.88733363e-03,
+ -8.08352232e-03,
+ -7.69890239e-03,
+ -7.87641760e-03,
+ -8.05852562e-03,
+ -8.24564695e-03,
+ -8.00882280e-03,
+ -8.18727538e-03,
+ -8.36882368e-03,
+ -8.55544209e-03,
+ -8.04922916e-03,
+ -8.21674801e-03,
+ -8.38823151e-03,
+ -8.56383517e-03,
+ -8.74411128e-03,
+ -7.35407788e-03,
+ -7.48245185e-03,
+ -7.61653157e-03,
+ -7.75389513e-03,
+ -8.20003450e-03,
+ -8.35770369e-03,
+ -8.51695240e-03,
+ -8.67962278e-03,
+ -8.84699915e-03,
+ -1.26767000e-02,
+ -1.30308550e-02,
+ -1.34020159e-02,
+ -1.27902590e-02,
+ -1.31374933e-02,
+ -1.35022206e-02,
+ -1.28020663e-02,
+ -1.31427627e-02,
+ -1.35003338e-02,
+ -8.81921593e-03,
+ -8.97676684e-03,
+ -8.73885304e-03,
+ -8.89289286e-03,
+ -9.05076787e-03,
+ -8.79113190e-03,
+ -8.94579384e-03,
+ -8.66949651e-03,
+ -8.81993212e-03,
+ -8.97467043e-03,
+ -9.13402718e-03,
+ -9.29924846e-03,
+ -9.47104022e-03,
+ -9.64829233e-03,
+ -9.83224157e-03,
+ -1.00242840e-02,
+ -1.02243433e-02,
+ -1.04304748e-02,
+ -1.06464764e-02,
+ -1.08723603e-02,
+ -1.11076497e-02,
+ -1.13517633e-02,
+ -1.16107482e-02,
+ -1.18797245e-02,
+ -1.21643478e-02,
+ -1.24597261e-02,
+ -1.27725713e-02,
+ -1.31026637e-02,
+ -1.34509858e-02,
+ -1.38195883e-02,
+ -1.42097492e-02,
+ -1.46267340e-02,
+ -1.50670996e-02,
+ -1.55417984e-02,
+ -1.60482023e-02,
+ -1.65943075e-02,
+ -1.71795618e-02,
+ -1.78127103e-02,
+ -1.84999816e-02,
+ -1.92504879e-02,
+ -2.00698171e-02,
+ -2.09702197e-02,
+ -2.19654124e-02,
+ -2.30720937e-02,
+ -2.43106075e-02,
+ -2.57069822e-02,
+ -2.72962451e-02,
+ -1.43178934e-02,
+ -1.48085468e-02,
+ -1.53383436e-02,
+ -1.59113277e-02,
+ -1.65353119e-02,
+ -1.72161739e-02,
+ -1.79625414e-02,
+ -1.87847745e-02,
+ -1.96950957e-02,
+ -2.07099430e-02,
+ -2.18482167e-02,
+ -2.31328830e-02,
+ ],
+ dtype=np.float32,
+ )
+ },
+ "Latitude": {"value": np.random.rand(96, 332).astype(np.float32)},
+ "Longitude": {"value": np.random.rand(96, 332).astype(np.float32)},
+ "LunarAzimuthAngle": {
+ "value": np.random.rand(96, 332).astype(np.float32)
+ },
+ "LunarZenithAngle": {
+ "value": np.random.rand(96, 332).astype(np.float32)
+ },
+ "MidTime": {
+ "value": np.array(
+ [
+ 1950675122400462,
+ 1950675124187044,
+ 1950675125973621,
+ 1950675127760200,
+ 1950675129546777,
+ 1950675131333401,
+ 1950675133119981,
+ 1950675134906559,
+ 1950675136693138,
+ 1950675138479716,
+ 1950675140266341,
+ 1950675142052918,
+ 1950675143839498,
+ 1950675145626075,
+ 1950675147412654,
+ 1950675149199278,
+ 1950675150985857,
+ 1950675152772434,
+ 1950675154559014,
+ 1950675156345591,
+ 1950675158132216,
+ 1950675159918795,
+ 1950675161705373,
+ 1950675163491595,
+ 1950675165278173,
+ 1950675167064395,
+ 1950675168850973,
+ 1950675170637195,
+ 1950675172423773,
+ 1950675174209995,
+ 1950675175996573,
+ 1950675177782795,
+ 1950675179569373,
+ 1950675181355595,
+ 1950675183142173,
+ 1950675184928395,
+ 1950675186714973,
+ 1950675188501195,
+ 1950675190287773,
+ 1950675192073995,
+ 1950675193860573,
+ 1950675195646795,
+ 1950675197433373,
+ 1950675199219595,
+ 1950675201006173,
+ 1950675202792395,
+ 1950675204578973,
+ -993,
+ ]
+ )
+ },
+ "MoonIllumFraction": {"value": 11.518141746520996},
+ "MoonPhaseAngle": {"value": 140.32131958007812},
+ "NumberOfTiePointZoneGroupsScan": {"value": 62},
+ "NumberOfTiePointZoneGroupsTrack": {"value": 1},
+ "NumberOfTiePointZonesScan": {
+ "value": np.array(
+ [
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 28,
+ 2,
+ 3,
+ 2,
+ 3,
+ 3,
+ 3,
+ 5,
+ 4,
+ 5,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 3,
+ 5,
+ 3,
+ 4,
+ 3,
+ 23,
+ 23,
+ 3,
+ 4,
+ 3,
+ 5,
+ 3,
+ 4,
+ 4,
+ 4,
+ 4,
+ 4,
+ 5,
+ 4,
+ 5,
+ 3,
+ 3,
+ 3,
+ 2,
+ 3,
+ 2,
+ 40,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ 1,
+ ],
+ dtype=np.int32,
+ )
+ },
+ "NumberOfTiePointZonesTrack": {"value": 1},
+ "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)},
+ "QF1_SCAN_VIIRSSDRGEO": {
+ "value": np.array(
+ [
+ 0,
+ 128,
+ 0,
+ 128,
+ 0,
+ 128,
+ 0,
+ 128,
0,
+ 128,
0,
+ 128,
0,
+ 128,
0,
+ 128,
0,
+ 128,
+ 2,
+ 130,
+ 2,
+ 130,
+ 2,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
+ 142,
+ 14,
0,
+ ],
+ dtype=np.uint8,
+ )
+ },
+ "QF2_SCAN_VIIRSSDRGEO": {
+ "value": np.array(
+ [
0,
0,
0,
@@ -60,2383 +855,1585 @@ def setUp(self):
0,
0,
0,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 254,
- 249,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 2,
],
dtype=np.uint8,
)
},
- "NumberOfScans": {"value": np.array([47])},
- "VIIRS-DNB-GEO_All": {
- "AlignmentCoefficient": {
- "value": np.array(
- [
- 2.11257413e-02,
- 2.11152732e-02,
- 2.11079046e-02,
- 2.10680142e-02,
- 1.80840008e-02,
- 1.80402063e-02,
- 1.79968309e-02,
- 1.79477539e-02,
- 2.20463774e-03,
- 2.17431062e-03,
- 2.14360282e-03,
- 2.11503846e-03,
- 2.08630669e-03,
- 2.05924874e-03,
- 2.03177333e-03,
- 2.00573727e-03,
- 1.98072987e-03,
- 1.95503305e-03,
- 1.93077011e-03,
- 1.90702057e-03,
- 1.88353716e-03,
- 1.86104013e-03,
- 1.83863181e-03,
- 1.81696517e-03,
- 1.79550308e-03,
- 1.77481642e-03,
- 1.75439729e-03,
- 1.73398503e-03,
- 1.71459839e-03,
- 1.69516564e-03,
- 1.67622324e-03,
- 1.65758410e-03,
- 1.63990213e-03,
- 1.62128301e-03,
- 1.60375470e-03,
- 1.58667017e-03,
- 1.61543000e-03,
- 1.59775047e-03,
- 1.50719041e-03,
- 1.48937735e-03,
- 1.47257745e-03,
- 1.50070526e-03,
- 1.48288533e-03,
- 9.29064234e-04,
- 9.12246935e-04,
- 8.95748264e-04,
- 8.71886965e-04,
- 8.55044520e-04,
- 8.38686305e-04,
- 8.18263041e-04,
- 8.01501446e-04,
- 7.85346841e-04,
- 1.15984806e-03,
- 1.14326552e-03,
- 1.12648588e-03,
- 1.11018715e-03,
- 1.09399087e-03,
- 1.19698711e-03,
- 1.18051842e-03,
- 1.16404379e-03,
- 1.14832399e-03,
- 9.92591376e-04,
- 9.75896895e-04,
- 9.59663419e-04,
- 9.43415158e-04,
- 9.27662419e-04,
- 8.92253709e-04,
- 8.75947590e-04,
- 8.60177504e-04,
- 8.44484195e-04,
- 8.35279003e-04,
- 8.19236680e-04,
- 8.03303672e-04,
- 7.87482015e-04,
- 7.60449213e-04,
- 7.44239136e-04,
- 7.28625571e-04,
- 7.12990935e-04,
- 6.89090986e-04,
- 6.73000410e-04,
- 6.57248020e-04,
- 6.41623745e-04,
- 6.20219158e-04,
- 6.04308851e-04,
- 5.88596100e-04,
- 5.73108089e-04,
- 3.65344196e-04,
- 3.49639275e-04,
- 3.34273063e-04,
- 4.81286290e-04,
- 4.65485587e-04,
- 4.49862011e-04,
- 4.34543617e-04,
- 4.19324206e-04,
- 2.60536268e-04,
- 2.45052564e-04,
- 2.29740850e-04,
- 2.34466774e-04,
- 2.18822126e-04,
- 2.03370175e-04,
- 1.88058810e-04,
- 1.60192372e-04,
- 1.44485937e-04,
- 1.28920830e-04,
- 3.45615146e-04,
- 3.30171984e-04,
- 3.14682693e-04,
- 2.99300562e-04,
- 2.83925037e-04,
- 2.68518896e-04,
- 2.53254839e-04,
- 2.37950648e-04,
- 2.22716670e-04,
- 2.07562072e-04,
- 1.92296386e-04,
- 1.77147449e-04,
- 1.61994336e-04,
- 1.46895778e-04,
- 1.31844325e-04,
- 1.16730320e-04,
- 1.01757469e-04,
- 8.67861963e-05,
- 7.18669180e-05,
- 5.70719567e-05,
- 4.24701866e-05,
- 2.84846719e-05,
- 1.70599415e-05,
- -1.47213286e-05,
- -2.33691408e-05,
- -3.68025649e-05,
- -5.12388433e-05,
- -6.59972284e-05,
- -8.08926561e-05,
- -9.58433884e-05,
- -1.10882705e-04,
- -1.25976600e-04,
- -1.41044657e-04,
- -1.56166439e-04,
- -1.71307023e-04,
- -1.86516074e-04,
- -2.01731804e-04,
- -2.16980450e-04,
- -2.32271064e-04,
- -2.47527263e-04,
- -2.62940506e-04,
- -2.78283434e-04,
- -2.93711084e-04,
- -3.09180934e-04,
- -3.24661058e-04,
- -3.40237195e-04,
- -1.27807143e-04,
- -1.43646437e-04,
- -1.59638614e-04,
- -1.87593061e-04,
- -2.03169184e-04,
- -2.18941437e-04,
- -2.34920750e-04,
- -2.30605408e-04,
- -2.46262236e-04,
- -2.62226094e-04,
- -4.19838558e-04,
- -4.35510388e-04,
- -4.51152271e-04,
- -4.67120990e-04,
- -4.83241311e-04,
- -3.37647041e-04,
- -3.53568990e-04,
- -3.69836489e-04,
- -5.76354389e-04,
- -5.92070050e-04,
- -6.08178903e-04,
- -6.24440494e-04,
- -6.45648804e-04,
- -6.61431870e-04,
- -6.77491073e-04,
- -6.93967624e-04,
- -7.17683870e-04,
- -7.33471534e-04,
- -7.49999890e-04,
- -7.66390527e-04,
- -7.93468382e-04,
- -8.09502264e-04,
- -8.25728697e-04,
- -8.42282083e-04,
- -8.51265620e-04,
- -8.67322611e-04,
- -8.83649045e-04,
- -9.00280487e-04,
- -9.35055199e-04,
- -9.51097580e-04,
- -9.67527216e-04,
- -9.84144746e-04,
- -1.00128003e-03,
- -1.15522649e-03,
- -1.17168750e-03,
- -1.18826574e-03,
- -1.20496599e-03,
- -1.10272120e-03,
- -1.11865194e-03,
- -1.13539130e-03,
- -1.15241797e-03,
- -1.16964686e-03,
- -7.97322951e-04,
- -8.14269355e-04,
- -8.31696263e-04,
- -8.51555436e-04,
- -8.68656265e-04,
- -8.86220601e-04,
- -9.09406052e-04,
- -9.26509325e-04,
- -9.44124535e-04,
- -1.49479776e-03,
- -1.51314179e-03,
- -1.48387800e-03,
- -1.50146009e-03,
- -1.51945755e-03,
- -1.61006744e-03,
- -1.62846781e-03,
- -1.59783731e-03,
- -1.61545863e-03,
- -1.63336343e-03,
- -1.65167439e-03,
- -1.67034590e-03,
- -1.68956630e-03,
- -1.70884258e-03,
- -1.72863202e-03,
- -1.74859120e-03,
- -1.76901231e-03,
- -1.79015659e-03,
- -1.81144674e-03,
- -1.83329231e-03,
- -1.85552111e-03,
- -1.87840930e-03,
- -1.90151483e-03,
- -1.92550803e-03,
- -1.94982730e-03,
- -1.97511422e-03,
- -2.00066133e-03,
- -2.02709576e-03,
- -2.05422146e-03,
- -2.08215159e-03,
- -2.11093877e-03,
- -2.14011059e-03,
- -2.17073411e-03,
- -2.20196834e-03,
- -2.23409734e-03,
- -2.26700748e-03,
- -2.30150856e-03,
- -2.33719964e-03,
- -2.37406371e-03,
- -2.41223071e-03,
- -2.45184498e-03,
- -2.49327719e-03,
- -2.53651105e-03,
- -2.58166087e-03,
- -2.62895599e-03,
- -2.67871981e-03,
- -2.73117283e-03,
- -5.49861044e-03,
- -5.55437338e-03,
- -5.61159104e-03,
- -5.67073002e-03,
- -5.73173212e-03,
- -5.79498662e-03,
- -5.85969677e-03,
- -5.92768658e-03,
- -5.99809457e-03,
- -6.07080618e-03,
- -6.14715228e-03,
- -6.22711331e-03,
- ],
- dtype=np.float32,
- )
- },
- "ExpansionCoefficient": {
- "value": np.array(
- [
- 1.17600127e-03,
- 1.17271533e-03,
- 1.17000856e-03,
- 1.16674276e-03,
- 2.11251900e-03,
- 2.10516527e-03,
- 2.09726905e-03,
- 2.08941335e-03,
- 1.63907595e-02,
- 1.58577170e-02,
- 1.53679820e-02,
- 1.49007449e-02,
- 1.44708352e-02,
- 1.40612368e-02,
- 1.36818690e-02,
- 1.33193973e-02,
- 1.29744308e-02,
- 1.26568424e-02,
- 1.23488475e-02,
- 1.20567940e-02,
- 1.17803067e-02,
- 1.15150018e-02,
- 1.12629030e-02,
- 1.10203745e-02,
- 1.07905651e-02,
- 1.05690639e-02,
- 1.03563424e-02,
- 1.01526314e-02,
- 9.95650515e-03,
- 9.76785459e-03,
- 9.58597753e-03,
- 9.41115711e-03,
- 9.23914276e-03,
- 9.07964632e-03,
- 8.92116502e-03,
- 8.76654685e-03,
- 9.04925726e-03,
- 8.88936501e-03,
- 9.14804544e-03,
- 8.98920093e-03,
- 8.83030891e-03,
- 9.06952657e-03,
- 8.90891161e-03,
- 1.36343827e-02,
- 1.32706892e-02,
- 1.29242949e-02,
- 1.36271119e-02,
- 1.32572902e-02,
- 1.29025253e-02,
- 1.35165229e-02,
- 1.31412474e-02,
- 1.27808526e-02,
- 8.91761761e-03,
- 8.74674786e-03,
- 8.58181808e-03,
- 8.42147414e-03,
- 8.26664641e-03,
- 7.81304855e-03,
- 7.67400907e-03,
- 7.54208490e-03,
- 7.40892906e-03,
- 8.81091598e-03,
- 8.62924196e-03,
- 8.45206063e-03,
- 8.28018785e-03,
- 8.11239891e-03,
- 8.62185098e-03,
- 8.43446422e-03,
- 8.25031102e-03,
- 8.07087123e-03,
- 8.30837712e-03,
- 8.11944436e-03,
- 7.93648325e-03,
- 7.75875151e-03,
- 8.14332347e-03,
- 7.94676598e-03,
- 7.75293307e-03,
- 7.56529858e-03,
- 7.88933039e-03,
- 7.68536143e-03,
- 7.48489471e-03,
- 7.28917075e-03,
- 7.55438488e-03,
- 7.34063145e-03,
- 7.13229552e-03,
- 6.92783622e-03,
- 1.06161544e-02,
- 1.01234140e-02,
- 9.64432582e-03,
- 6.52031973e-03,
- 6.29310543e-03,
- 6.06948463e-03,
- 5.84984245e-03,
- 5.63343242e-03,
- 8.61937553e-03,
- 8.08268972e-03,
- 7.55874207e-03,
- 6.79610623e-03,
- 6.32849289e-03,
- 5.86955249e-03,
- 5.41723240e-03,
- 5.56734810e-03,
- 5.01116784e-03,
- 4.46233014e-03,
- 1.40874484e-03,
- 1.34475902e-03,
- 1.28140685e-03,
- 1.21824886e-03,
- 1.15505024e-03,
- 1.09222531e-03,
- 1.02962845e-03,
- 9.67168540e-04,
- 9.04808170e-04,
- 8.42478999e-04,
- 7.80681905e-04,
- 7.18652213e-04,
- 6.56902499e-04,
- 5.95146266e-04,
- 5.33432467e-04,
- 4.72071581e-04,
- 4.10460081e-04,
- 3.49062117e-04,
- 2.87777104e-04,
- 2.26464268e-04,
- 1.65259655e-04,
- 1.03993290e-04,
- 4.27830964e-05,
- -1.84028686e-05,
- -7.95840388e-05,
- -1.40780976e-04,
- -2.01987947e-04,
- -2.63233029e-04,
- -3.24499299e-04,
- -3.85862397e-04,
- -4.47216793e-04,
- -5.08567959e-04,
- -5.70152479e-04,
- -6.31901203e-04,
- -6.93684444e-04,
- -7.55490037e-04,
- -8.17523745e-04,
- -8.79664498e-04,
- -9.41973762e-04,
- -1.00450485e-03,
- -1.06710335e-03,
- -1.12990546e-03,
- -1.19290419e-03,
- -1.25615683e-03,
- -1.31971564e-03,
- -1.38323894e-03,
- -4.38789371e-03,
- -4.93527949e-03,
- -5.48970094e-03,
- -5.34658274e-03,
- -5.79780247e-03,
- -6.25621388e-03,
- -6.72366377e-03,
- -7.48283789e-03,
- -8.00681766e-03,
- -8.54192488e-03,
- -5.58420410e-03,
- -5.79793099e-03,
- -6.01683883e-03,
- -6.23886706e-03,
- -6.46463828e-03,
- -9.56355780e-03,
- -1.00387875e-02,
- -1.05282217e-02,
- -6.87109074e-03,
- -7.07587786e-03,
- -7.28309387e-03,
- -7.49528036e-03,
- -7.23363785e-03,
- -7.42882164e-03,
- -7.62982434e-03,
- -7.83343613e-03,
- -7.51076965e-03,
- -7.69859226e-03,
- -7.88733363e-03,
- -8.08352232e-03,
- -7.69890239e-03,
- -7.87641760e-03,
- -8.05852562e-03,
- -8.24564695e-03,
- -8.00882280e-03,
- -8.18727538e-03,
- -8.36882368e-03,
- -8.55544209e-03,
- -8.04922916e-03,
- -8.21674801e-03,
- -8.38823151e-03,
- -8.56383517e-03,
- -8.74411128e-03,
- -7.35407788e-03,
- -7.48245185e-03,
- -7.61653157e-03,
- -7.75389513e-03,
- -8.20003450e-03,
- -8.35770369e-03,
- -8.51695240e-03,
- -8.67962278e-03,
- -8.84699915e-03,
- -1.26767000e-02,
- -1.30308550e-02,
- -1.34020159e-02,
- -1.27902590e-02,
- -1.31374933e-02,
- -1.35022206e-02,
- -1.28020663e-02,
- -1.31427627e-02,
- -1.35003338e-02,
- -8.81921593e-03,
- -8.97676684e-03,
- -8.73885304e-03,
- -8.89289286e-03,
- -9.05076787e-03,
- -8.79113190e-03,
- -8.94579384e-03,
- -8.66949651e-03,
- -8.81993212e-03,
- -8.97467043e-03,
- -9.13402718e-03,
- -9.29924846e-03,
- -9.47104022e-03,
- -9.64829233e-03,
- -9.83224157e-03,
- -1.00242840e-02,
- -1.02243433e-02,
- -1.04304748e-02,
- -1.06464764e-02,
- -1.08723603e-02,
- -1.11076497e-02,
- -1.13517633e-02,
- -1.16107482e-02,
- -1.18797245e-02,
- -1.21643478e-02,
- -1.24597261e-02,
- -1.27725713e-02,
- -1.31026637e-02,
- -1.34509858e-02,
- -1.38195883e-02,
- -1.42097492e-02,
- -1.46267340e-02,
- -1.50670996e-02,
- -1.55417984e-02,
- -1.60482023e-02,
- -1.65943075e-02,
- -1.71795618e-02,
- -1.78127103e-02,
- -1.84999816e-02,
- -1.92504879e-02,
- -2.00698171e-02,
- -2.09702197e-02,
- -2.19654124e-02,
- -2.30720937e-02,
- -2.43106075e-02,
- -2.57069822e-02,
- -2.72962451e-02,
- -1.43178934e-02,
- -1.48085468e-02,
- -1.53383436e-02,
- -1.59113277e-02,
- -1.65353119e-02,
- -1.72161739e-02,
- -1.79625414e-02,
- -1.87847745e-02,
- -1.96950957e-02,
- -2.07099430e-02,
- -2.18482167e-02,
- -2.31328830e-02,
- ],
- dtype=np.float32,
- )
- },
- "Latitude": {"value": np.random.rand(96, 332).astype(np.float32)},
- "Longitude": {"value": np.random.rand(96, 332).astype(np.float32)},
- "LunarAzimuthAngle": {
- "value": np.random.rand(96, 332).astype(np.float32)
- },
- "LunarZenithAngle": {
- "value": np.random.rand(96, 332).astype(np.float32)
- },
- "MidTime": {
- "value": np.array(
+ "SCAttitude": {
+ "value": np.array(
+ [
+ [-9.22587514e-01, 3.92340779e00, 5.93621433e-01],
+ [-2.82428920e-01, 3.98425841e00, 7.05978215e-01],
+ [5.63421488e-01, 3.83695555e00, 3.93174857e-01],
+ [-3.16407561e-01, 3.85351181e00, 5.33868372e-01],
+ [-1.10977542e00, 3.82791996e00, 6.06707633e-01],
+ [-1.46703672e00, 3.94862103e00, 6.45296216e-01],
+ [-1.14162290e00, 3.79930806e00, 7.45548725e-01],
+ [-1.56181908e00, 3.68108273e00, 6.49301231e-01],
+ [-1.46823406e00, 3.63365412e00, 5.03535330e-01],
+ [-1.02590537e00, 3.64477968e00, 5.22250295e-01],
+ [-5.35379410e-01, 3.69151831e00, 4.32526857e-01],
+ [-5.78065366e-02, 3.37806726e00, 4.95986529e-02],
+ [-2.40110800e-01, 3.22970843e00, -9.55391768e-03],
+ [-6.54527247e-01, 3.16465378e00, 1.89672917e-01],
+ [-1.35780311e00, 3.24750924e00, 1.63008988e-01],
+ [-1.47417045e00, 3.39788198e00, 1.84387550e-01],
+ [-1.74577117e00, 3.53278613e00, 1.89606979e-01],
+ [-1.46304774e00, 3.22666740e00, 1.59070507e-01],
+ [-4.05473042e00, 3.06258607e00, 1.10443914e00],
+ [-5.91582203e00, 2.83895302e00, 1.79846287e00],
+ [-7.04713678e00, 2.55699897e00, 2.23985386e00],
+ [-7.43741798e00, 2.21711683e00, 2.42266488e00],
+ [-7.06249666e00, 1.81872594e00, 2.33713675e00],
+ [-5.96051836e00, 1.36609375e00, 1.99506497e00],
+ [-4.13137341e00, 8.60225558e-01, 1.39551389e00],
+ [-1.57741416e00, 3.02793205e-01, 5.36690295e-01],
+ [7.63817742e-12, 1.11727738e-10, 2.74194088e-11],
+ [-1.24213686e-11, 8.01499769e-11, -1.34056446e-11],
+ [1.78272761e-11, 9.04948685e-11, 1.77389995e-11],
+ [-1.47259357e-11, 9.37734057e-11, -3.89882709e-11],
+ [-1.94052344e-11, 1.49411969e-10, -2.48492286e-11],
+ [3.40418752e-12, 1.25333730e-10, 1.14499972e-11],
+ [5.64890669e-12, 1.35170833e-10, 2.27858565e-11],
+ [8.78361273e-12, 1.02109009e-10, -5.92111386e-12],
+ [1.47398396e-11, 8.59943505e-11, -8.54686872e-13],
+ [-5.35027361e-12, 1.25450331e-10, -1.54262800e-11],
+ [2.12667054e-11, 1.57356642e-10, 2.54392306e-11],
+ [-6.39285022e-12, 1.42791029e-10, -8.58749790e-12],
+ [-2.18451160e-11, 9.94347313e-11, -2.18451160e-11],
+ [1.77587389e-11, 1.16834944e-10, 3.09037483e-11],
+ [5.09583955e-12, 1.06878555e-10, 1.30452402e-11],
+ [-1.25895900e-11, 1.06217646e-10, -1.07971496e-11],
+ [1.45264981e-11, 1.03935242e-10, 1.73963136e-11],
+ [-1.41730258e-12, 7.72037989e-11, 1.15057850e-11],
+ [1.99397634e-11, 1.36618120e-10, 4.70010628e-11],
+ [1.24784124e-11, 1.14499965e-10, 4.69658253e-12],
+ [-1.83001236e-11, 5.19546177e-11, -1.31873679e-11],
+ [-9.99299988e02, -9.99299988e02, -9.99299988e02],
+ ],
+ dtype=np.float32,
+ )
+ },
+ "SCPosition": {
+ "value": np.array(
+ [
+ [2.3191672e06, -4.5127075e06, 5.1096645e06],
+ [2.3202438e06, -4.5225140e06, 5.1005205e06],
+ [2.3213098e06, -4.5323050e06, 5.0913595e06],
+ [2.3223650e06, -4.5420810e06, 5.0821800e06],
+ [2.3234100e06, -4.5518415e06, 5.0729835e06],
+ [2.3244445e06, -4.5615875e06, 5.0637700e06],
+ [2.3254692e06, -4.5713185e06, 5.0545390e06],
+ [2.3264830e06, -4.5810340e06, 5.0452915e06],
+ [2.3274862e06, -4.5907340e06, 5.0360255e06],
+ [2.3284792e06, -4.6004185e06, 5.0267430e06],
+ [2.3294620e06, -4.6100885e06, 5.0174430e06],
+ [2.3304345e06, -4.6197430e06, 5.0081270e06],
+ [2.3313962e06, -4.6293820e06, 4.9987935e06],
+ [2.3323475e06, -4.6390050e06, 4.9894420e06],
+ [2.3332888e06, -4.6486130e06, 4.9800740e06],
+ [2.3342195e06, -4.6582060e06, 4.9706890e06],
+ [2.3351398e06, -4.6677835e06, 4.9612880e06],
+ [2.3360495e06, -4.6773440e06, 4.9518685e06],
+ [2.3369522e06, -4.6868750e06, 4.9424430e06],
+ [2.3378502e06, -4.6963695e06, 4.9330150e06],
+ [2.3387432e06, -4.7058270e06, 4.9235845e06],
+ [2.3396312e06, -4.7152475e06, 4.9141520e06],
+ [2.3405140e06, -4.7246290e06, 4.9047175e06],
+ [2.3413915e06, -4.7339725e06, 4.8952825e06],
+ [2.3422642e06, -4.7432805e06, 4.8858430e06],
+ [2.3431318e06, -4.7525505e06, 4.8764035e06],
+ [2.3439710e06, -4.7618790e06, 4.8668965e06],
+ [2.3447770e06, -4.7712820e06, 4.8573130e06],
+ [2.3455728e06, -4.7806710e06, 4.8477115e06],
+ [2.3463582e06, -4.7900425e06, 4.8380950e06],
+ [2.3471335e06, -4.7994005e06, 4.8284610e06],
+ [2.3478980e06, -4.8087395e06, 4.8188110e06],
+ [2.3486522e06, -4.8180645e06, 4.8091435e06],
+ [2.3493960e06, -4.8273715e06, 4.7994615e06],
+ [2.3501298e06, -4.8366645e06, 4.7897610e06],
+ [2.3508530e06, -4.8459395e06, 4.7800465e06],
+ [2.3515658e06, -4.8552000e06, 4.7703130e06],
+ [2.3522680e06, -4.8644420e06, 4.7605655e06],
+ [2.3529602e06, -4.8736700e06, 4.7508000e06],
+ [2.3536420e06, -4.8828800e06, 4.7410205e06],
+ [2.3543132e06, -4.8920755e06, 4.7312230e06],
+ [2.3549740e06, -4.9012520e06, 4.7214105e06],
+ [2.3556248e06, -4.9104145e06, 4.7115800e06],
+ [2.3562650e06, -4.9195590e06, 4.7017360e06],
+ [2.3568952e06, -4.9286890e06, 4.6918745e06],
+ [2.3575145e06, -4.9378000e06, 4.6819980e06],
+ [2.3581235e06, -4.9468960e06, 4.6721035e06],
+ [-9.9929999e02, -9.9929999e02, -9.9929999e02],
+ ],
+ dtype=np.float32,
+ )
+ },
+ "SCSolarAzimuthAngle": {
+ "value": np.array(
+ [
+ -140.6137,
+ -140.54446,
+ -140.47484,
+ -140.40486,
+ -140.33464,
+ -140.26427,
+ -140.19333,
+ -140.12198,
+ -140.05042,
+ -139.97855,
+ -139.90648,
+ -139.83394,
+ -139.76117,
+ -139.68803,
+ -139.61465,
+ -139.54103,
+ -139.46695,
+ -139.3923,
+ -139.31741,
+ -139.2424,
+ -139.16727,
+ -139.09201,
+ -139.01662,
+ -138.94112,
+ -138.86546,
+ -138.78972,
+ -138.71251,
+ -138.63487,
+ -138.5569,
+ -138.4786,
+ -138.39995,
+ -138.32097,
+ -138.24161,
+ -138.16193,
+ -138.0819,
+ -138.00153,
+ -137.92078,
+ -137.8397,
+ -137.75827,
+ -137.67648,
+ -137.59433,
+ -137.51183,
+ -137.42896,
+ -137.34573,
+ -137.26213,
+ -137.17819,
+ -137.09386,
+ -999.3,
+ ],
+ dtype=np.float32,
+ )
+ },
+ "SCSolarZenithAngle": {
+ "value": np.array(
+ [
+ 135.88528,
+ 135.96703,
+ 136.04868,
+ 136.1302,
+ 136.21165,
+ 136.2931,
+ 136.37451,
+ 136.4556,
+ 136.53659,
+ 136.61748,
+ 136.69843,
+ 136.77931,
+ 136.86021,
+ 136.94092,
+ 137.02148,
+ 137.10208,
+ 137.18248,
+ 137.26239,
+ 137.34204,
+ 137.42155,
+ 137.50092,
+ 137.58014,
+ 137.65923,
+ 137.73816,
+ 137.81696,
+ 137.8956,
+ 137.97507,
+ 138.05447,
+ 138.13382,
+ 138.21303,
+ 138.29218,
+ 138.37122,
+ 138.45016,
+ 138.529,
+ 138.60777,
+ 138.68642,
+ 138.76498,
+ 138.84343,
+ 138.9218,
+ 139.00005,
+ 139.07823,
+ 139.15627,
+ 139.23422,
+ 139.31207,
+ 139.38983,
+ 139.46748,
+ 139.54503,
+ -999.3,
+ ],
+ dtype=np.float32,
+ )
+ },
+ "SCVelocity": {
+ "value": np.array(
+ [
+ [605.31726, -5492.9614, -5113.397],
+ [599.4935, -5484.5615, -5123.1396],
+ [593.66986, -5476.142, -5132.8657],
+ [587.8464, -5467.7017, -5142.573],
+ [582.02313, -5459.241, -5152.263],
+ [576.19995, -5450.7607, -5161.936],
+ [570.37714, -5442.2607, -5171.592],
+ [564.5546, -5433.741, -5181.2295],
+ [558.73236, -5425.2, -5190.849],
+ [552.9104, -5416.6396, -5200.4517],
+ [547.0887, -5408.06, -5210.0366],
+ [541.26746, -5399.4604, -5219.6035],
+ [535.44666, -5390.841, -5229.153],
+ [529.6263, -5382.201, -5238.684],
+ [523.8063, -5373.5415, -5248.1978],
+ [517.9866, -5364.863, -5257.694],
+ [512.16754, -5356.1646, -5267.1724],
+ [506.34906, -5347.446, -5276.632],
+ [500.53455, -5338.72, -5286.0645],
+ [494.72552, -5329.993, -5295.466],
+ [488.9218, -5321.265, -5304.8364],
+ [483.1238, -5312.536, -5314.1743],
+ [477.33157, -5303.806, -5323.4795],
+ [471.546, -5295.0767, -5332.7515],
+ [465.7647, -5286.344, -5341.9937],
+ [459.99005, -5277.613, -5351.2026],
+ [454.19785, -5268.798, -5360.442],
+ [448.38614, -5259.887, -5369.7207],
+ [442.57404, -5250.955, -5378.983],
+ [436.7639, -5242.0063, -5388.225],
+ [430.9534, -5233.0366, -5397.4517],
+ [425.145, -5224.0483, -5406.6567],
+ [419.33627, -5215.0396, -5415.845],
+ [413.52963, -5206.013, -5425.014],
+ [407.72275, -5196.9663, -5434.1665],
+ [401.91797, -5187.9023, -5443.299],
+ [396.11307, -5178.8164, -5452.4136],
+ [390.3103, -5169.7134, -5461.508],
+ [384.50742, -5160.59, -5470.586],
+ [378.70673, -5151.4497, -5479.644],
+ [372.90598, -5142.288, -5488.6846],
+ [367.1075, -5133.109, -5497.7046],
+ [361.309, -5123.9097, -5506.708],
+ [355.5128, -5114.6934, -5515.691],
+ [349.71658, -5105.4565, -5524.657],
+ [343.9228, -5096.202, -5533.602],
+ [338.12906, -5086.927, -5542.53],
+ [-999.3, -999.3, -999.3],
+ ],
+ dtype=np.float32,
+ )
+ },
+ "SatelliteAzimuthAngle": {
+ "value": np.random.rand(96, 332).astype(np.float32)
+ },
+ "SatelliteZenithAngle": {
+ "value": np.random.rand(96, 332).astype(np.float32)
+ },
+ "SolarAzimuthAngle": {
+ "value": np.random.rand(96, 332).astype(np.float32)
+ },
+ "SolarZenithAngle": {
+ "value": np.random.rand(96, 332).astype(np.float32)
+ },
+ "StartTime": {
+ "value": np.array(
+ [
+ 1950675122120971,
+ 1950675123907557,
+ 1950675125694139,
+ 1950675127480722,
+ 1950675129267304,
+ 1950675131053910,
+ 1950675132840494,
+ 1950675134627077,
+ 1950675136413660,
+ 1950675138200243,
+ 1950675139986850,
+ 1950675141773433,
+ 1950675143560016,
+ 1950675145346598,
+ 1950675147133181,
+ 1950675148919788,
+ 1950675150706371,
+ 1950675152492953,
+ 1950675154279537,
+ 1950675156066119,
+ 1950675157852726,
+ 1950675159639309,
+ 1950675161425892,
+ 1950675163212109,
+ 1950675164998692,
+ 1950675166784909,
+ 1950675168571492,
+ 1950675170357709,
+ 1950675172144292,
+ 1950675173930509,
+ 1950675175717092,
+ 1950675177503309,
+ 1950675179289892,
+ 1950675181076109,
+ 1950675182862692,
+ 1950675184648909,
+ 1950675186435492,
+ 1950675188221709,
+ 1950675190008292,
+ 1950675191794509,
+ 1950675193581092,
+ 1950675195367309,
+ 1950675197153892,
+ 1950675198940109,
+ 1950675200726692,
+ 1950675202512909,
+ 1950675204299492,
+ -993,
+ ]
+ )
+ },
+ "TiePointZoneGroupLocationScanCompact": {
+ "value": np.array(
+ [
+ 0,
+ 2,
+ 4,
+ 6,
+ 8,
+ 10,
+ 12,
+ 14,
+ 16,
+ 45,
+ 48,
+ 52,
+ 55,
+ 59,
+ 63,
+ 67,
+ 73,
+ 78,
+ 84,
+ 89,
+ 94,
+ 99,
+ 104,
+ 109,
+ 113,
+ 119,
+ 123,
+ 128,
+ 132,
+ 156,
+ 180,
+ 184,
+ 189,
+ 193,
+ 199,
+ 203,
+ 208,
+ 213,
+ 218,
+ 223,
+ 228,
+ 234,
+ 239,
+ 245,
+ 249,
+ 253,
+ 257,
+ 260,
+ 264,
+ 267,
+ 308,
+ 310,
+ 312,
+ 314,
+ 316,
+ 318,
+ 320,
+ 322,
+ 324,
+ 326,
+ 328,
+ 330,
+ ],
+ dtype=np.int32,
+ )
+ },
+ "TiePointZoneGroupLocationTrackCompact": {"value": 0},
+ "attrs": {
+ "OriginalFilename": np.array(
+ [
[
- 1950675122400462,
- 1950675124187044,
- 1950675125973621,
- 1950675127760200,
- 1950675129546777,
- 1950675131333401,
- 1950675133119981,
- 1950675134906559,
- 1950675136693138,
- 1950675138479716,
- 1950675140266341,
- 1950675142052918,
- 1950675143839498,
- 1950675145626075,
- 1950675147412654,
- 1950675149199278,
- 1950675150985857,
- 1950675152772434,
- 1950675154559014,
- 1950675156345591,
- 1950675158132216,
- 1950675159918795,
- 1950675161705373,
- 1950675163491595,
- 1950675165278173,
- 1950675167064395,
- 1950675168850973,
- 1950675170637195,
- 1950675172423773,
- 1950675174209995,
- 1950675175996573,
- 1950675177782795,
- 1950675179569373,
- 1950675181355595,
- 1950675183142173,
- 1950675184928395,
- 1950675186714973,
- 1950675188501195,
- 1950675190287773,
- 1950675192073995,
- 1950675193860573,
- 1950675195646795,
- 1950675197433373,
- 1950675199219595,
- 1950675201006173,
- 1950675202792395,
- 1950675204578973,
- -993,
+ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5"
]
- )
- },
- "MoonIllumFraction": {"value": 11.518141746520996},
- "MoonPhaseAngle": {"value": 140.32131958007812},
- "NumberOfTiePointZoneGroupsScan": {"value": 62},
- "NumberOfTiePointZoneGroupsTrack": {"value": 1},
- "NumberOfTiePointZonesScan": {
- "value": np.array(
- [
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 28,
- 2,
- 3,
- 2,
- 3,
- 3,
- 3,
- 5,
- 4,
- 5,
- 4,
- 4,
- 4,
- 4,
- 4,
- 3,
- 5,
- 3,
- 4,
- 3,
- 23,
- 23,
- 3,
- 4,
- 3,
- 5,
- 3,
- 4,
- 4,
- 4,
- 4,
- 4,
- 5,
- 4,
- 5,
- 3,
- 3,
- 3,
- 2,
- 3,
- 2,
- 40,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- 1,
- ],
- dtype=np.int32,
- )
- },
- "NumberOfTiePointZonesTrack": {"value": 1},
- "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)},
- "QF1_SCAN_VIIRSSDRGEO": {
- "value": np.array(
- [
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 0,
- 128,
- 2,
- 130,
- 2,
- 130,
- 2,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 142,
- 14,
- 0,
- ],
- dtype=np.uint8,
- )
- },
- "QF2_SCAN_VIIRSSDRGEO": {
- "value": np.array(
- [
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 2,
- ],
- dtype=np.uint8,
- )
- },
- "SCAttitude": {
- "value": np.array(
+ ],
+ dtype="|S78",
+ )
+ },
+ },
+ "VIIRS-DNB-SDR_All": {
+ "NumberOfBadChecksums": {
+ "value": np.array(
+ [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ -993,
+ ],
+ dtype=np.int32,
+ )
+ },
+ "NumberOfDiscardedPkts": {
+ "value": np.array(
+ [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ -993,
+ ],
+ dtype=np.int32,
+ )
+ },
+ "NumberOfMissingPkts": {
+ "value": np.array(
+ [
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 17,
+ 18,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ 479,
+ -993,
+ ],
+ dtype=np.int32,
+ )
+ },
+ "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)},
+ "QF1_VIIRSDNBSDR": {
+ "value": (np.random.rand(768, 4064) * 255).astype(np.uint8)
+ },
+ "QF2_SCAN_SDR": {
+ "value": np.array(
+ [
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 1,
+ 0,
+ 0,
+ ],
+ dtype=np.uint8,
+ )
+ },
+ "QF3_SCAN_RDR": {
+ "value": np.array(
+ [
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 0,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ 64,
+ ],
+ dtype=np.uint8,
+ )
+ },
+ "Radiance": {"value": np.random.rand(768, 4064).astype(np.float32)},
+ "attrs": {
+ "OriginalFilename": np.array(
+ [
[
- [-9.22587514e-01, 3.92340779e00, 5.93621433e-01],
- [-2.82428920e-01, 3.98425841e00, 7.05978215e-01],
- [5.63421488e-01, 3.83695555e00, 3.93174857e-01],
- [-3.16407561e-01, 3.85351181e00, 5.33868372e-01],
- [-1.10977542e00, 3.82791996e00, 6.06707633e-01],
- [-1.46703672e00, 3.94862103e00, 6.45296216e-01],
- [-1.14162290e00, 3.79930806e00, 7.45548725e-01],
- [-1.56181908e00, 3.68108273e00, 6.49301231e-01],
- [-1.46823406e00, 3.63365412e00, 5.03535330e-01],
- [-1.02590537e00, 3.64477968e00, 5.22250295e-01],
- [-5.35379410e-01, 3.69151831e00, 4.32526857e-01],
- [-5.78065366e-02, 3.37806726e00, 4.95986529e-02],
- [-2.40110800e-01, 3.22970843e00, -9.55391768e-03],
- [-6.54527247e-01, 3.16465378e00, 1.89672917e-01],
- [-1.35780311e00, 3.24750924e00, 1.63008988e-01],
- [-1.47417045e00, 3.39788198e00, 1.84387550e-01],
- [-1.74577117e00, 3.53278613e00, 1.89606979e-01],
- [-1.46304774e00, 3.22666740e00, 1.59070507e-01],
- [-4.05473042e00, 3.06258607e00, 1.10443914e00],
- [-5.91582203e00, 2.83895302e00, 1.79846287e00],
- [-7.04713678e00, 2.55699897e00, 2.23985386e00],
- [-7.43741798e00, 2.21711683e00, 2.42266488e00],
- [-7.06249666e00, 1.81872594e00, 2.33713675e00],
- [-5.96051836e00, 1.36609375e00, 1.99506497e00],
- [-4.13137341e00, 8.60225558e-01, 1.39551389e00],
- [-1.57741416e00, 3.02793205e-01, 5.36690295e-01],
- [7.63817742e-12, 1.11727738e-10, 2.74194088e-11],
- [-1.24213686e-11, 8.01499769e-11, -1.34056446e-11],
- [1.78272761e-11, 9.04948685e-11, 1.77389995e-11],
- [-1.47259357e-11, 9.37734057e-11, -3.89882709e-11],
- [-1.94052344e-11, 1.49411969e-10, -2.48492286e-11],
- [3.40418752e-12, 1.25333730e-10, 1.14499972e-11],
- [5.64890669e-12, 1.35170833e-10, 2.27858565e-11],
- [8.78361273e-12, 1.02109009e-10, -5.92111386e-12],
- [1.47398396e-11, 8.59943505e-11, -8.54686872e-13],
- [-5.35027361e-12, 1.25450331e-10, -1.54262800e-11],
- [2.12667054e-11, 1.57356642e-10, 2.54392306e-11],
- [-6.39285022e-12, 1.42791029e-10, -8.58749790e-12],
- [-2.18451160e-11, 9.94347313e-11, -2.18451160e-11],
- [1.77587389e-11, 1.16834944e-10, 3.09037483e-11],
- [5.09583955e-12, 1.06878555e-10, 1.30452402e-11],
- [-1.25895900e-11, 1.06217646e-10, -1.07971496e-11],
- [1.45264981e-11, 1.03935242e-10, 1.73963136e-11],
- [-1.41730258e-12, 7.72037989e-11, 1.15057850e-11],
- [1.99397634e-11, 1.36618120e-10, 4.70010628e-11],
- [1.24784124e-11, 1.14499965e-10, 4.69658253e-12],
- [-1.83001236e-11, 5.19546177e-11, -1.31873679e-11],
- [-9.99299988e02, -9.99299988e02, -9.99299988e02],
- ],
- dtype=np.float32,
- )
- },
- "SCPosition": {
- "value": np.array(
+ b"SVDNB_j01_d20191025_t0611251_e0612478_b10015_c20191025062427398006_cspp_dev.h5"
+ ]
+ ],
+ dtype="|S78",
+ ),
+ "PixelOffsetScan": np.array([[0.5]], dtype=np.float32),
+ "PixelOffsetTrack": np.array([[0.5]], dtype=np.float32),
+ "TiePointZoneGroupLocationScan": np.array(
+ [
+ [0],
+ [2],
+ [4],
+ [6],
+ [8],
+ [10],
+ [12],
+ [14],
+ [16],
+ [464],
+ [496],
+ [544],
+ [576],
+ [648],
+ [720],
+ [792],
+ [872],
+ [928],
+ [1008],
+ [1072],
+ [1136],
+ [1200],
+ [1264],
+ [1328],
+ [1400],
+ [1480],
+ [1552],
+ [1640],
+ [1712],
+ [1896],
+ [2080],
+ [2152],
+ [2240],
+ [2312],
+ [2392],
+ [2464],
+ [2528],
+ [2592],
+ [2656],
+ [2720],
+ [2784],
+ [2864],
+ [2920],
+ [3000],
+ [3072],
+ [3144],
+ [3216],
+ [3248],
+ [3296],
+ [3328],
+ [3968],
+ [3976],
+ [3984],
+ [3992],
+ [4000],
+ [4008],
+ [4016],
+ [4024],
+ [4032],
+ [4040],
+ [4048],
+ [4056],
+ ],
+ dtype=np.int32,
+ ),
+ "TiePointZoneGroupLocationTrack": np.array(
+ [[0]], dtype=np.int32
+ ),
+ "TiePointZoneSizeScan": np.array(
+ [
+ [2],
+ [2],
+ [2],
+ [2],
+ [2],
+ [2],
+ [2],
+ [2],
+ [16],
+ [16],
+ [16],
+ [16],
+ [24],
+ [24],
+ [24],
+ [16],
+ [14],
+ [16],
+ [16],
+ [16],
+ [16],
+ [16],
+ [16],
+ [24],
+ [16],
+ [24],
+ [22],
+ [24],
+ [8],
+ [8],
+ [24],
+ [22],
+ [24],
+ [16],
+ [24],
+ [16],
+ [16],
+ [16],
+ [16],
+ [16],
+ [16],
+ [14],
+ [16],
+ [24],
+ [24],
+ [24],
+ [16],
+ [16],
+ [16],
+ [16],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ [8],
+ ],
+ dtype=np.int32,
+ ),
+ "TiePointZoneSizeTrack": np.array([[16]], dtype=np.int32),
+ },
+ },
+ "attrs": {"MissionStartTime": np.array([[1698019234000000]])},
+ },
+ "Data_Products": {
+ "VIIRS-DNB-GEO": {
+ "VIIRS-DNB-GEO_Aggr": {
+ "attrs": {
+ "AggregateBeginningDate": np.array(
+ [[b"20191025"]], dtype="|S9"
+ ),
+ "AggregateBeginningGranuleID": np.array(
+ [[b"J01002526558865"]], dtype="|S16"
+ ),
+ "AggregateBeginningOrbitNumber": np.array(
+ [[10015]], dtype=np.uint64
+ ),
+ "AggregateBeginningTime": np.array(
+ [[b"061125.120971Z"]], dtype="|S15"
+ ),
+ "AggregateEndingDate": np.array(
+ [[b"20191025"]], dtype="|S9"
+ ),
+ "AggregateEndingGranuleID": np.array(
+ [[b"J01002526558865"]], dtype="|S16"
+ ),
+ "AggregateEndingOrbitNumber": np.array(
+ [[10015]], dtype=np.uint64
+ ),
+ "AggregateEndingTime": np.array(
+ [[b"061247.849492Z"]], dtype="|S15"
+ ),
+ "AggregateNumberGranules": np.array([[1]], dtype=np.uint64),
+ }
+ },
+ "VIIRS-DNB-GEO_Gran_0": {
+ "attrs": {
+ "Ascending/Descending_Indicator": np.array(
+ [[1]], dtype=np.uint8
+ ),
+ "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"),
+ "Beginning_Time": np.array(
+ [[b"061125.120971Z"]], dtype="|S15"
+ ),
+ "East_Bounding_Coordinate": np.array(
+ [[-45.09228]], dtype=np.float32
+ ),
+ "Ending_Date": np.array([[b"20191025"]], dtype="|S9"),
+ "Ending_Time": np.array(
+ [[b"061247.849492Z"]], dtype="|S15"
+ ),
+ "G-Ring_Latitude": np.array(
[
- [2.3191672e06, -4.5127075e06, 5.1096645e06],
- [2.3202438e06, -4.5225140e06, 5.1005205e06],
- [2.3213098e06, -4.5323050e06, 5.0913595e06],
- [2.3223650e06, -4.5420810e06, 5.0821800e06],
- [2.3234100e06, -4.5518415e06, 5.0729835e06],
- [2.3244445e06, -4.5615875e06, 5.0637700e06],
- [2.3254692e06, -4.5713185e06, 5.0545390e06],
- [2.3264830e06, -4.5810340e06, 5.0452915e06],
- [2.3274862e06, -4.5907340e06, 5.0360255e06],
- [2.3284792e06, -4.6004185e06, 5.0267430e06],
- [2.3294620e06, -4.6100885e06, 5.0174430e06],
- [2.3304345e06, -4.6197430e06, 5.0081270e06],
- [2.3313962e06, -4.6293820e06, 4.9987935e06],
- [2.3323475e06, -4.6390050e06, 4.9894420e06],
- [2.3332888e06, -4.6486130e06, 4.9800740e06],
- [2.3342195e06, -4.6582060e06, 4.9706890e06],
- [2.3351398e06, -4.6677835e06, 4.9612880e06],
- [2.3360495e06, -4.6773440e06, 4.9518685e06],
- [2.3369522e06, -4.6868750e06, 4.9424430e06],
- [2.3378502e06, -4.6963695e06, 4.9330150e06],
- [2.3387432e06, -4.7058270e06, 4.9235845e06],
- [2.3396312e06, -4.7152475e06, 4.9141520e06],
- [2.3405140e06, -4.7246290e06, 4.9047175e06],
- [2.3413915e06, -4.7339725e06, 4.8952825e06],
- [2.3422642e06, -4.7432805e06, 4.8858430e06],
- [2.3431318e06, -4.7525505e06, 4.8764035e06],
- [2.3439710e06, -4.7618790e06, 4.8668965e06],
- [2.3447770e06, -4.7712820e06, 4.8573130e06],
- [2.3455728e06, -4.7806710e06, 4.8477115e06],
- [2.3463582e06, -4.7900425e06, 4.8380950e06],
- [2.3471335e06, -4.7994005e06, 4.8284610e06],
- [2.3478980e06, -4.8087395e06, 4.8188110e06],
- [2.3486522e06, -4.8180645e06, 4.8091435e06],
- [2.3493960e06, -4.8273715e06, 4.7994615e06],
- [2.3501298e06, -4.8366645e06, 4.7897610e06],
- [2.3508530e06, -4.8459395e06, 4.7800465e06],
- [2.3515658e06, -4.8552000e06, 4.7703130e06],
- [2.3522680e06, -4.8644420e06, 4.7605655e06],
- [2.3529602e06, -4.8736700e06, 4.7508000e06],
- [2.3536420e06, -4.8828800e06, 4.7410205e06],
- [2.3543132e06, -4.8920755e06, 4.7312230e06],
- [2.3549740e06, -4.9012520e06, 4.7214105e06],
- [2.3556248e06, -4.9104145e06, 4.7115800e06],
- [2.3562650e06, -4.9195590e06, 4.7017360e06],
- [2.3568952e06, -4.9286890e06, 4.6918745e06],
- [2.3575145e06, -4.9378000e06, 4.6819980e06],
- [2.3581235e06, -4.9468960e06, 4.6721035e06],
- [-9.9929999e02, -9.9929999e02, -9.9929999e02],
+ [41.84151],
+ [44.31062],
+ [46.78565],
+ [45.41409],
+ [41.07657],
+ [38.81504],
+ [36.53401],
+ [40.55788],
],
dtype=np.float32,
- )
- },
- "SCSolarAzimuthAngle": {
- "value": np.array(
+ ),
+ "G-Ring_Longitude": np.array(
[
- -140.6137,
- -140.54446,
- -140.47484,
- -140.40486,
- -140.33464,
- -140.26427,
- -140.19333,
- -140.12198,
- -140.05042,
- -139.97855,
- -139.90648,
- -139.83394,
- -139.76117,
- -139.68803,
- -139.61465,
- -139.54103,
- -139.46695,
- -139.3923,
- -139.31741,
- -139.2424,
- -139.16727,
- -139.09201,
- -139.01662,
- -138.94112,
- -138.86546,
- -138.78972,
- -138.71251,
- -138.63487,
- -138.5569,
- -138.4786,
- -138.39995,
- -138.32097,
- -138.24161,
- -138.16193,
- -138.0819,
- -138.00153,
- -137.92078,
- -137.8397,
- -137.75827,
- -137.67648,
- -137.59433,
- -137.51183,
- -137.42896,
- -137.34573,
- -137.26213,
- -137.17819,
- -137.09386,
- -999.3,
+ [-82.66234],
+ [-82.55624],
+ [-82.48891],
+ [-62.80042],
+ [-45.09228],
+ [-46.58502],
+ [-47.95933],
+ [-64.54196],
],
dtype=np.float32,
- )
- },
- "SCSolarZenithAngle": {
- "value": np.array(
+ ),
+ "LeapSecondsGranuleStart": np.array([[37]], dtype=np.int32),
+ "N_Algorithm_Version": np.array(
+ [[b"1.O.000.014"]], dtype="|S12"
+ ),
+ "N_Anc_Filename": np.array(
[
- 135.88528,
- 135.96703,
- 136.04868,
- 136.1302,
- 136.21165,
- 136.2931,
- 136.37451,
- 136.4556,
- 136.53659,
- 136.61748,
- 136.69843,
- 136.77931,
- 136.86021,
- 136.94092,
- 137.02148,
- 137.10208,
- 137.18248,
- 137.26239,
- 137.34204,
- 137.42155,
- 137.50092,
- 137.58014,
- 137.65923,
- 137.73816,
- 137.81696,
- 137.8956,
- 137.97507,
- 138.05447,
- 138.13382,
- 138.21303,
- 138.29218,
- 138.37122,
- 138.45016,
- 138.529,
- 138.60777,
- 138.68642,
- 138.76498,
- 138.84343,
- 138.9218,
- 139.00005,
- 139.07823,
- 139.15627,
- 139.23422,
- 139.31207,
- 139.38983,
- 139.46748,
- 139.54503,
- -999.3,
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0691_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0692_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0693_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0719_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0720_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0721_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0722_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0723_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0724_1.O.0.0"
+ ],
+ [
+ b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0725_1.O.0.0"
+ ],
+ [
+ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa
+ ],
+ [
+ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa
+ ],
],
- dtype=np.float32,
- )
- },
- "SCVelocity": {
- "value": np.array(
+ dtype="|S104",
+ ),
+ "N_Aux_Filename": np.array(
[
- [605.31726, -5492.9614, -5113.397],
- [599.4935, -5484.5615, -5123.1396],
- [593.66986, -5476.142, -5132.8657],
- [587.8464, -5467.7017, -5142.573],
- [582.02313, -5459.241, -5152.263],
- [576.19995, -5450.7607, -5161.936],
- [570.37714, -5442.2607, -5171.592],
- [564.5546, -5433.741, -5181.2295],
- [558.73236, -5425.2, -5190.849],
- [552.9104, -5416.6396, -5200.4517],
- [547.0887, -5408.06, -5210.0366],
- [541.26746, -5399.4604, -5219.6035],
- [535.44666, -5390.841, -5229.153],
- [529.6263, -5382.201, -5238.684],
- [523.8063, -5373.5415, -5248.1978],
- [517.9866, -5364.863, -5257.694],
- [512.16754, -5356.1646, -5267.1724],
- [506.34906, -5347.446, -5276.632],
- [500.53455, -5338.72, -5286.0645],
- [494.72552, -5329.993, -5295.466],
- [488.9218, -5321.265, -5304.8364],
- [483.1238, -5312.536, -5314.1743],
- [477.33157, -5303.806, -5323.4795],
- [471.546, -5295.0767, -5332.7515],
- [465.7647, -5286.344, -5341.9937],
- [459.99005, -5277.613, -5351.2026],
- [454.19785, -5268.798, -5360.442],
- [448.38614, -5259.887, -5369.7207],
- [442.57404, -5250.955, -5378.983],
- [436.7639, -5242.0063, -5388.225],
- [430.9534, -5233.0366, -5397.4517],
- [425.145, -5224.0483, -5406.6567],
- [419.33627, -5215.0396, -5415.845],
- [413.52963, -5206.013, -5425.014],
- [407.72275, -5196.9663, -5434.1665],
- [401.91797, -5187.9023, -5443.299],
- [396.11307, -5178.8164, -5452.4136],
- [390.3103, -5169.7134, -5461.508],
- [384.50742, -5160.59, -5470.586],
- [378.70673, -5151.4497, -5479.644],
- [372.90598, -5142.288, -5488.6846],
- [367.1075, -5133.109, -5497.7046],
- [361.309, -5123.9097, -5506.708],
- [355.5128, -5114.6934, -5515.691],
- [349.71658, -5105.4565, -5524.657],
- [343.9228, -5096.202, -5533.602],
- [338.12906, -5086.927, -5542.53],
- [-999.3, -999.3, -999.3],
+ [
+ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"CmnGeo-SAA-AC_j01_20151008180000Z_20170807130000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"TLE-AUX_j01_20191024053224Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-GEO-DNB-PARAM-LUT_j01_20180507121508Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-GEO-IMG-PARAM-LUT_j01_20180430182354Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-GEO-MOD-PARAM-LUT_j01_20180430182652Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
+ ],
],
- dtype=np.float32,
- )
- },
- "SatelliteAzimuthAngle": {
- "value": np.random.rand(96, 332).astype(np.float32)
- },
- "SatelliteZenithAngle": {
- "value": np.random.rand(96, 332).astype(np.float32)
- },
- "SolarAzimuthAngle": {
- "value": np.random.rand(96, 332).astype(np.float32)
- },
- "SolarZenithAngle": {
- "value": np.random.rand(96, 332).astype(np.float32)
- },
- "StartTime": {
- "value": np.array(
- [
- 1950675122120971,
- 1950675123907557,
- 1950675125694139,
- 1950675127480722,
- 1950675129267304,
- 1950675131053910,
- 1950675132840494,
- 1950675134627077,
- 1950675136413660,
- 1950675138200243,
- 1950675139986850,
- 1950675141773433,
- 1950675143560016,
- 1950675145346598,
- 1950675147133181,
- 1950675148919788,
- 1950675150706371,
- 1950675152492953,
- 1950675154279537,
- 1950675156066119,
- 1950675157852726,
- 1950675159639309,
- 1950675161425892,
- 1950675163212109,
- 1950675164998692,
- 1950675166784909,
- 1950675168571492,
- 1950675170357709,
- 1950675172144292,
- 1950675173930509,
- 1950675175717092,
- 1950675177503309,
- 1950675179289892,
- 1950675181076109,
- 1950675182862692,
- 1950675184648909,
- 1950675186435492,
- 1950675188221709,
- 1950675190008292,
- 1950675191794509,
- 1950675193581092,
- 1950675195367309,
- 1950675197153892,
- 1950675198940109,
- 1950675200726692,
- 1950675202512909,
- 1950675204299492,
- -993,
- ]
- )
- },
- "TiePointZoneGroupLocationScanCompact": {
- "value": np.array(
+ dtype="|S126",
+ ),
+ "N_Beginning_Orbit_Number": np.array(
+ [[10015]], dtype=np.uint64
+ ),
+ "N_Beginning_Time_IET": np.array(
+ [[1950675122120971]], dtype=np.uint64
+ ),
+ "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"),
+ "N_Creation_Time": np.array(
+ [[b"062136.412867Z"]], dtype="|S15"
+ ),
+ "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"),
+ "N_Ending_Time_IET": np.array(
+ [[1950675204849492]], dtype=np.uint64
+ ),
+ "N_Granule_ID": np.array(
+ [[b"J01002526558865"]], dtype="|S16"
+ ),
+ "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"),
+ "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"),
+ "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"),
+ "N_Input_Prod": np.array(
[
- 0,
- 2,
- 4,
- 6,
- 8,
- 10,
- 12,
- 14,
- 16,
- 45,
- 48,
- 52,
- 55,
- 59,
- 63,
- 67,
- 73,
- 78,
- 84,
- 89,
- 94,
- 99,
- 104,
- 109,
- 113,
- 119,
- 123,
- 128,
- 132,
- 156,
- 180,
- 184,
- 189,
- 193,
- 199,
- 203,
- 208,
- 213,
- 218,
- 223,
- 228,
- 234,
- 239,
- 245,
- 249,
- 253,
- 257,
- 260,
- 264,
- 267,
- 308,
- 310,
- 312,
- 314,
- 316,
- 318,
- 320,
- 322,
- 324,
- 326,
- 328,
- 330,
+ [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"],
+ [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"],
+ [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"],
],
- dtype=np.int32,
- )
- },
- "TiePointZoneGroupLocationTrackCompact": {"value": 0},
- "attrs": {
- "OriginalFilename": np.array(
+ dtype="|S40",
+ ),
+ "N_JPSS_Document_Ref": np.array(
[
[
- b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5"
- ]
+ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf"
+ ],
+ [
+ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-GEO-PP.xml"
+ ],
+ [
+ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf"
+ ],
],
- dtype="|S78",
- )
- },
- },
- "VIIRS-DNB-SDR_All": {
- "NumberOfBadChecksums": {
- "value": np.array(
+ dtype="|S68",
+ ),
+ "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"),
+ "N_Nadir_Latitude_Max": np.array(
+ [[45.3722]], dtype=np.float32
+ ),
+ "N_Nadir_Latitude_Min": np.array(
+ [[40.6172]], dtype=np.float32
+ ),
+ "N_Nadir_Longitude_Max": np.array(
+ [[-62.80047]], dtype=np.float32
+ ),
+ "N_Nadir_Longitude_Min": np.array(
+ [[-64.51342]], dtype=np.float32
+ ),
+ "N_Number_Of_Scans": np.array([[47]], dtype=np.int32),
+ "N_Primary_Label": np.array(
+ [[b"Non-Primary"]], dtype="|S12"
+ ),
+ "N_Quality_Summary_Names": np.array(
[
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- -993,
+ [b"Automatic Quality Flag"],
+ [b"Percent Missing Data"],
+ [b"Percent Out of Bounds"],
],
- dtype=np.int32,
- )
- },
- "NumberOfDiscardedPkts": {
- "value": np.array(
+ dtype="|S23",
+ ),
+ "N_Quality_Summary_Values": np.array(
+ [[1], [61], [0]], dtype=np.int32
+ ),
+ "N_Reference_ID": np.array(
+ [[b"VIIRS-DNB-GEO:J01002526558865:A1"]], dtype="|S33"
+ ),
+ "N_Software_Version": np.array(
+ [[b"CSPP_SDR_3_1_3"]], dtype="|S15"
+ ),
+ "N_Spacecraft_Maneuver": np.array(
+ [[b"Normal Operations"]], dtype="|S18"
+ ),
+ "North_Bounding_Coordinate": np.array(
+ [[46.8018]], dtype=np.float32
+ ),
+ "South_Bounding_Coordinate": np.array(
+ [[36.53401]], dtype=np.float32
+ ),
+ "West_Bounding_Coordinate": np.array(
+ [[-82.66234]], dtype=np.float32
+ ),
+ }
+ },
+ "attrs": {
+ "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"),
+ "N_Anc_Type_Tasked": np.array([[b"Official"]], dtype="|S9"),
+ "N_Collection_Short_Name": np.array(
+ [[b"VIIRS-DNB-GEO"]], dtype="|S14"
+ ),
+ "N_Dataset_Type_Tag": np.array([[b"GEO"]], dtype="|S4"),
+ "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"),
+ "Operational_Mode": np.array(
+ [[b"J01 Normal Operations, VIIRS Operational"]],
+ dtype="|S41",
+ ),
+ },
+ },
+ "VIIRS-DNB-SDR": {
+ "VIIRS-DNB-SDR_Aggr": {
+ "attrs": {
+ "AggregateBeginningDate": np.array(
+ [[b"20191025"]], dtype="|S9"
+ ),
+ "AggregateBeginningGranuleID": np.array(
+ [[b"J01002526558865"]], dtype="|S16"
+ ),
+ "AggregateBeginningOrbitNumber": np.array(
+ [[10015]], dtype=np.uint64
+ ),
+ "AggregateBeginningTime": np.array(
+ [[b"061125.120971Z"]], dtype="|S15"
+ ),
+ "AggregateEndingDate": np.array(
+ [[b"20191025"]], dtype="|S9"
+ ),
+ "AggregateEndingGranuleID": np.array(
+ [[b"J01002526558865"]], dtype="|S16"
+ ),
+ "AggregateEndingOrbitNumber": np.array(
+ [[10015]], dtype=np.uint64
+ ),
+ "AggregateEndingTime": np.array(
+ [[b"061247.849492Z"]], dtype="|S15"
+ ),
+ "AggregateNumberGranules": np.array([[1]], dtype=np.uint64),
+ }
+ },
+ "VIIRS-DNB-SDR_Gran_0": {
+ "attrs": {
+ "Ascending/Descending_Indicator": np.array(
+ [[1]], dtype=np.uint8
+ ),
+ "Band_ID": np.array([[b"N/A"]], dtype="|S4"),
+ "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"),
+ "Beginning_Time": np.array(
+ [[b"061125.120971Z"]], dtype="|S15"
+ ),
+ "East_Bounding_Coordinate": np.array(
+ [[-45.09281]], dtype=np.float32
+ ),
+ "Ending_Date": np.array([[b"20191025"]], dtype="|S9"),
+ "Ending_Time": np.array(
+ [[b"061247.849492Z"]], dtype="|S15"
+ ),
+ "G-Ring_Latitude": np.array(
[
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- -993,
+ [41.84157],
+ [44.31069],
+ [46.78591],
+ [45.41409],
+ [41.07675],
+ [38.81512],
+ [36.53402],
+ [40.55788],
],
- dtype=np.int32,
- )
- },
- "NumberOfMissingPkts": {
- "value": np.array(
+ dtype=np.float32,
+ ),
+ "G-Ring_Longitude": np.array(
[
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 17,
- 18,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- 479,
- -993,
+ [-82.65787],
+ [-82.55148],
+ [-82.47269],
+ [-62.80042],
+ [-45.09281],
+ [-46.58528],
+ [-47.95936],
+ [-64.54196],
],
- dtype=np.int32,
- )
- },
- "PadByte1": {"value": np.array([0, 0, 0], dtype=np.uint8)},
- "QF1_VIIRSDNBSDR": {
- "value": (np.random.rand(768, 4064) * 255).astype(np.uint8)
- },
- "QF2_SCAN_SDR": {
- "value": np.array(
+ dtype=np.float32,
+ ),
+ "N_Algorithm_Version": np.array(
+ [[b"1.O.000.015"]], dtype="|S12"
+ ),
+ "N_Anc_Filename": np.array(
[
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 1,
- 0,
- 0,
+ [
+ b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa
+ ],
+ [
+ b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa
+ ],
],
- dtype=np.uint8,
- )
- },
- "QF3_SCAN_RDR": {
- "value": np.array(
+ dtype="|S104",
+ ),
+ "N_Aux_Filename": np.array(
[
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 0,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
- 64,
+ [
+ b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-DNB-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-I1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-I2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-I3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-I4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-I5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M10-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M11-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M12-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M13-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M14-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M15-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M16-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M6-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M7-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M8-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-M9-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-RSBAUTOCAL-HISTORY-AUX_j01_20191024021527Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-RSBAUTOCAL-VOLT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-EDD154640-109C-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-BB-TEMP-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-CAL-AUTOMATE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Pred-SideA-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-COEFF-A-LUT_j01_20180109114311Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-COEFF-B-LUT_j01_20180109101739Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-004-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DELTA-C-LUT_j01_20180109000000Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DG-ANOMALY-DN-LIMITS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DNB-DN0-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-026-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DNB-FRAME-TO-ZONE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DNB-GAIN-RATIOS-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-025-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DNB-LGS-GAINS-LUT_j01_20180413122703Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-005-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DNB-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-DNB-STRAY-LIGHT-CORRECTION-LUT_j01_20190930160523Z_20191001000000Z_ee00000000000000Z_PS-1-O-CCR-4322-024-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-EBBT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-EMISSIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-F-PREDICTED-LUT_j01_20180413123333Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-006-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-GAIN-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-HAM-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-OBC-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-OBC-RR-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-OBS-TO-PIXELS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-RADIOMETRIC-PARAM-V3-LUT_j01_20161117000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-REFLECTIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-RELATIVE-SPECTRAL-RESPONSE-LUT_j01_20161031000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-FusedM9-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-RTA-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-M16-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-SOLAR-IRAD-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Thuillier2002-LE-PE_all-_all_all-_ops" # noqa
+ ],
+ [
+ b"VIIRS-SDR-TELE-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa
+ ],
],
- dtype=np.uint8,
- )
- },
- "Radiance": {"value": np.random.rand(768, 4064).astype(np.float32)},
- "attrs": {
- "OriginalFilename": np.array(
+ dtype="|S151",
+ ),
+ "N_Beginning_Orbit_Number": np.array(
+ [[10015]], dtype=np.uint64
+ ),
+ "N_Beginning_Time_IET": np.array(
+ [[1950675122120971]], dtype=np.uint64
+ ),
+ "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"),
+ "N_Creation_Time": np.array(
+ [[b"062411.116253Z"]], dtype="|S15"
+ ),
+ "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"),
+ "N_Ending_Time_IET": np.array(
+ [[1950675204849492]], dtype=np.uint64
+ ),
+ "N_Graceful_Degradation": np.array([[b"No"]], dtype="|S3"),
+ "N_Granule_ID": np.array(
+ [[b"J01002526558865"]], dtype="|S16"
+ ),
+ "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"),
+ "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"),
+ "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"),
+ "N_Input_Prod": np.array(
[
- [
- b"SVDNB_j01_d20191025_t0611251_e0612478_b10015_c20191025062427398006_cspp_dev.h5"
- ]
+ [b"GEO-VIIRS-OBC-IP:J01002526558865:A1"],
+ [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"],
+ [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"],
+ [b"VIIRS-DNB-GEO:J01002526558865:A1"],
+ [b"VIIRS-IMG-RGEO-TC:J01002526558865:A1"],
+ [b"VIIRS-MOD-RGEO-TC:J01002526558865:A1"],
+ [b"VIIRS-SCIENCE-RDR:J01002526558012:A1"],
+ [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"],
],
- dtype="|S78",
+ dtype="|S40",
),
- "PixelOffsetScan": np.array([[0.5]], dtype=np.float32),
- "PixelOffsetTrack": np.array([[0.5]], dtype=np.float32),
- "TiePointZoneGroupLocationScan": np.array(
+ "N_JPSS_Document_Ref": np.array(
[
- [0],
- [2],
- [4],
- [6],
- [8],
- [10],
- [12],
- [14],
- [16],
- [464],
- [496],
- [544],
- [576],
- [648],
- [720],
- [792],
- [872],
- [928],
- [1008],
- [1072],
- [1136],
- [1200],
- [1264],
- [1328],
- [1400],
- [1480],
- [1552],
- [1640],
- [1712],
- [1896],
- [2080],
- [2152],
- [2240],
- [2312],
- [2392],
- [2464],
- [2528],
- [2592],
- [2656],
- [2720],
- [2784],
- [2864],
- [2920],
- [3000],
- [3072],
- [3144],
- [3216],
- [3248],
- [3296],
- [3328],
- [3968],
- [3976],
- [3984],
- [3992],
- [4000],
- [4008],
- [4016],
- [4024],
- [4032],
- [4040],
- [4048],
- [4056],
+ [
+ b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf"
+ ],
+ [
+ b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-SDR-PP.xml"
+ ],
+ [
+ b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf"
+ ],
],
- dtype=np.int32,
+ dtype="|S68",
+ ),
+ "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"),
+ "N_Nadir_Latitude_Max": np.array(
+ [[45.3722]], dtype=np.float32
+ ),
+ "N_Nadir_Latitude_Min": np.array(
+ [[40.6172]], dtype=np.float32
),
- "TiePointZoneGroupLocationTrack": np.array(
- [[0]], dtype=np.int32
+ "N_Nadir_Longitude_Max": np.array(
+ [[-62.80047]], dtype=np.float32
),
- "TiePointZoneSizeScan": np.array(
+ "N_Nadir_Longitude_Min": np.array(
+ [[-64.51342]], dtype=np.float32
+ ),
+ "N_Number_Of_Scans": np.array([[47]], dtype=np.int32),
+ "N_Percent_Erroneous_Data": np.array(
+ [[0.0]], dtype=np.float32
+ ),
+ "N_Percent_Missing_Data": np.array(
+ [[51.05127]], dtype=np.float32
+ ),
+ "N_Percent_Not-Applicable_Data": np.array(
+ [[0.0]], dtype=np.float32
+ ),
+ "N_Primary_Label": np.array(
+ [[b"Non-Primary"]], dtype="|S12"
+ ),
+ "N_Quality_Summary_Names": np.array(
[
- [2],
- [2],
- [2],
- [2],
- [2],
- [2],
- [2],
- [2],
- [16],
- [16],
- [16],
- [16],
- [24],
- [24],
- [24],
- [16],
- [14],
- [16],
- [16],
- [16],
- [16],
- [16],
- [16],
- [24],
- [16],
- [24],
- [22],
- [24],
- [8],
- [8],
- [24],
- [22],
- [24],
- [16],
- [24],
- [16],
- [16],
- [16],
- [16],
- [16],
- [16],
- [14],
- [16],
- [24],
- [24],
- [24],
- [16],
- [16],
- [16],
- [16],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
- [8],
+ [b"Scan Quality Exclusion"],
+ [b"Summary VIIRS SDR Quality"],
],
- dtype=np.int32,
+ dtype="|S26",
),
- "TiePointZoneSizeTrack": np.array([[16]], dtype=np.int32),
- },
- },
- "attrs": {"MissionStartTime": np.array([[1698019234000000]])},
- },
- "Data_Products": {
- "VIIRS-DNB-GEO": {
- "VIIRS-DNB-GEO_Aggr": {
- "attrs": {
- "AggregateBeginningDate": np.array(
- [[b"20191025"]], dtype="|S9"
- ),
- "AggregateBeginningGranuleID": np.array(
- [[b"J01002526558865"]], dtype="|S16"
- ),
- "AggregateBeginningOrbitNumber": np.array(
- [[10015]], dtype=np.uint64
- ),
- "AggregateBeginningTime": np.array(
- [[b"061125.120971Z"]], dtype="|S15"
- ),
- "AggregateEndingDate": np.array(
- [[b"20191025"]], dtype="|S9"
- ),
- "AggregateEndingGranuleID": np.array(
- [[b"J01002526558865"]], dtype="|S16"
- ),
- "AggregateEndingOrbitNumber": np.array(
- [[10015]], dtype=np.uint64
- ),
- "AggregateEndingTime": np.array(
- [[b"061247.849492Z"]], dtype="|S15"
- ),
- "AggregateNumberGranules": np.array([[1]], dtype=np.uint64),
- }
- },
- "VIIRS-DNB-GEO_Gran_0": {
- "attrs": {
- "Ascending/Descending_Indicator": np.array(
- [[1]], dtype=np.uint8
- ),
- "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"),
- "Beginning_Time": np.array(
- [[b"061125.120971Z"]], dtype="|S15"
- ),
- "East_Bounding_Coordinate": np.array(
- [[-45.09228]], dtype=np.float32
- ),
- "Ending_Date": np.array([[b"20191025"]], dtype="|S9"),
- "Ending_Time": np.array(
- [[b"061247.849492Z"]], dtype="|S15"
- ),
- "G-Ring_Latitude": np.array(
- [
- [41.84151],
- [44.31062],
- [46.78565],
- [45.41409],
- [41.07657],
- [38.81504],
- [36.53401],
- [40.55788],
- ],
- dtype=np.float32,
- ),
- "G-Ring_Longitude": np.array(
- [
- [-82.66234],
- [-82.55624],
- [-82.48891],
- [-62.80042],
- [-45.09228],
- [-46.58502],
- [-47.95933],
- [-64.54196],
- ],
- dtype=np.float32,
- ),
- "LeapSecondsGranuleStart": np.array([[37]], dtype=np.int32),
- "N_Algorithm_Version": np.array(
- [[b"1.O.000.014"]], dtype="|S12"
- ),
- "N_Anc_Filename": np.array(
- [
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0691_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0692_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0693_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0719_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0720_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0721_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0722_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0723_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0724_1.O.0.0"
- ],
- [
- b"Terrain-Eco-ANC-Tile_20030125000000Z_ee00000000000000Z_NA_NA_N0725_1.O.0.0"
- ],
- [
- b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa
- ],
- [
- b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa
- ],
- ],
- dtype="|S104",
- ),
- "N_Aux_Filename": np.array(
- [
- [
- b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"CmnGeo-SAA-AC_j01_20151008180000Z_20170807130000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"TLE-AUX_j01_20191024053224Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-GEO-DNB-PARAM-LUT_j01_20180507121508Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-GEO-IMG-PARAM-LUT_j01_20180430182354Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-GEO-MOD-PARAM-LUT_j01_20180430182652Z_20180315000000Z_ee00000000000000Z_PS-1-O-CCR-3963-006-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
- ],
- ],
- dtype="|S126",
- ),
- "N_Beginning_Orbit_Number": np.array(
- [[10015]], dtype=np.uint64
- ),
- "N_Beginning_Time_IET": np.array(
- [[1950675122120971]], dtype=np.uint64
- ),
- "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"),
- "N_Creation_Time": np.array(
- [[b"062136.412867Z"]], dtype="|S15"
- ),
- "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"),
- "N_Ending_Time_IET": np.array(
- [[1950675204849492]], dtype=np.uint64
- ),
- "N_Granule_ID": np.array(
- [[b"J01002526558865"]], dtype="|S16"
- ),
- "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"),
- "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"),
- "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"),
- "N_Input_Prod": np.array(
- [
- [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"],
- [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"],
- [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"],
- ],
- dtype="|S40",
- ),
- "N_JPSS_Document_Ref": np.array(
- [
- [
- b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf"
- ],
- [
- b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-GEO-PP.xml"
- ],
- [
- b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf"
- ],
- ],
- dtype="|S68",
- ),
- "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"),
- "N_Nadir_Latitude_Max": np.array(
- [[45.3722]], dtype=np.float32
- ),
- "N_Nadir_Latitude_Min": np.array(
- [[40.6172]], dtype=np.float32
- ),
- "N_Nadir_Longitude_Max": np.array(
- [[-62.80047]], dtype=np.float32
- ),
- "N_Nadir_Longitude_Min": np.array(
- [[-64.51342]], dtype=np.float32
- ),
- "N_Number_Of_Scans": np.array([[47]], dtype=np.int32),
- "N_Primary_Label": np.array(
- [[b"Non-Primary"]], dtype="|S12"
- ),
- "N_Quality_Summary_Names": np.array(
- [
- [b"Automatic Quality Flag"],
- [b"Percent Missing Data"],
- [b"Percent Out of Bounds"],
- ],
- dtype="|S23",
- ),
- "N_Quality_Summary_Values": np.array(
- [[1], [61], [0]], dtype=np.int32
- ),
- "N_Reference_ID": np.array(
- [[b"VIIRS-DNB-GEO:J01002526558865:A1"]], dtype="|S33"
- ),
- "N_Software_Version": np.array(
- [[b"CSPP_SDR_3_1_3"]], dtype="|S15"
- ),
- "N_Spacecraft_Maneuver": np.array(
- [[b"Normal Operations"]], dtype="|S18"
- ),
- "North_Bounding_Coordinate": np.array(
- [[46.8018]], dtype=np.float32
- ),
- "South_Bounding_Coordinate": np.array(
- [[36.53401]], dtype=np.float32
- ),
- "West_Bounding_Coordinate": np.array(
- [[-82.66234]], dtype=np.float32
- ),
- }
- },
- "attrs": {
- "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"),
- "N_Anc_Type_Tasked": np.array([[b"Official"]], dtype="|S9"),
- "N_Collection_Short_Name": np.array(
- [[b"VIIRS-DNB-GEO"]], dtype="|S14"
- ),
- "N_Dataset_Type_Tag": np.array([[b"GEO"]], dtype="|S4"),
- "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"),
- "Operational_Mode": np.array(
- [[b"J01 Normal Operations, VIIRS Operational"]],
- dtype="|S41",
- ),
- },
- },
- "VIIRS-DNB-SDR": {
- "VIIRS-DNB-SDR_Aggr": {
- "attrs": {
- "AggregateBeginningDate": np.array(
- [[b"20191025"]], dtype="|S9"
- ),
- "AggregateBeginningGranuleID": np.array(
- [[b"J01002526558865"]], dtype="|S16"
- ),
- "AggregateBeginningOrbitNumber": np.array(
- [[10015]], dtype=np.uint64
- ),
- "AggregateBeginningTime": np.array(
- [[b"061125.120971Z"]], dtype="|S15"
- ),
- "AggregateEndingDate": np.array(
- [[b"20191025"]], dtype="|S9"
- ),
- "AggregateEndingGranuleID": np.array(
- [[b"J01002526558865"]], dtype="|S16"
- ),
- "AggregateEndingOrbitNumber": np.array(
- [[10015]], dtype=np.uint64
- ),
- "AggregateEndingTime": np.array(
- [[b"061247.849492Z"]], dtype="|S15"
- ),
- "AggregateNumberGranules": np.array([[1]], dtype=np.uint64),
- }
- },
- "VIIRS-DNB-SDR_Gran_0": {
- "attrs": {
- "Ascending/Descending_Indicator": np.array(
- [[1]], dtype=np.uint8
- ),
- "Band_ID": np.array([[b"N/A"]], dtype="|S4"),
- "Beginning_Date": np.array([[b"20191025"]], dtype="|S9"),
- "Beginning_Time": np.array(
- [[b"061125.120971Z"]], dtype="|S15"
- ),
- "East_Bounding_Coordinate": np.array(
- [[-45.09281]], dtype=np.float32
- ),
- "Ending_Date": np.array([[b"20191025"]], dtype="|S9"),
- "Ending_Time": np.array(
- [[b"061247.849492Z"]], dtype="|S15"
- ),
- "G-Ring_Latitude": np.array(
- [
- [41.84157],
- [44.31069],
- [46.78591],
- [45.41409],
- [41.07675],
- [38.81512],
- [36.53402],
- [40.55788],
- ],
- dtype=np.float32,
- ),
- "G-Ring_Longitude": np.array(
- [
- [-82.65787],
- [-82.55148],
- [-82.47269],
- [-62.80042],
- [-45.09281],
- [-46.58528],
- [-47.95936],
- [-64.54196],
- ],
- dtype=np.float32,
- ),
- "N_Algorithm_Version": np.array(
- [[b"1.O.000.015"]], dtype="|S12"
- ),
- "N_Anc_Filename": np.array(
- [
- [
- b"off_Planet-Eph-ANC_Static_JPL_000f_20151008_200001010000Z_20000101000000Z_ee00000000000000Z_np" # noqa
- ],
- [
- b"off_USNO-PolarWander-UT1-ANC_Ser7_USNO_000f_20191025_201910250000Z_20191025000109Z_ee20191101120000Z_np" # noqa
- ],
- ],
- dtype="|S104",
- ),
- "N_Aux_Filename": np.array(
- [
- [
- b"CMNGEO-PARAM-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-DNB-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-I1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-I2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-I3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-I4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-I5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M1-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M10-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M11-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M12-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M13-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M14-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M15-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M16-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M2-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M3-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M4-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M5-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M6-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M7-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M8-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-M9-SDR-DQTT_j01_20151008180000Z_20020101010000Z_ee00000000000000Z_PS-1-O-NPP-1-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-RSBAUTOCAL-HISTORY-AUX_j01_20191024021527Z_20191024000000Z_ee00000000000000Z_-_nobc_ops_all-_ops" # noqa
- ],
- [
- b"VIIRS-RSBAUTOCAL-VOLT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-EDD154640-109C-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-BB-TEMP-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-CAL-AUTOMATE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Pred-SideA-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-COEFF-A-LUT_j01_20180109114311Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-COEFF-B-LUT_j01_20180109101739Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-004-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DELTA-C-LUT_j01_20180109000000Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DG-ANOMALY-DN-LIMITS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DNB-DN0-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-026-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DNB-FRAME-TO-ZONE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DNB-GAIN-RATIOS-LUT_j01_20190930000000Z_20190928000000Z_ee00000000000000Z_PS-1-O-CCR-4262-025-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DNB-LGS-GAINS-LUT_j01_20180413122703Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-005-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DNB-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Op21-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-DNB-STRAY-LIGHT-CORRECTION-LUT_j01_20190930160523Z_20191001000000Z_ee00000000000000Z_PS-1-O-CCR-4322-024-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-EBBT-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-EMISSIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-F-PREDICTED-LUT_j01_20180413123333Z_20180412000000Z_ee00000000000000Z_PS-1-O-CCR-3918-006-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-GAIN-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-HAM-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-OBC-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-OBC-RR-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-OBS-TO-PIXELS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-QA-LUT_j01_20180109121411Z_20180409000000Z_ee00000000000000Z_PS-1-O-CCR-3742-003-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-RADIOMETRIC-PARAM-V3-LUT_j01_20161117000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-REFLECTIVE-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SameAsSNPP-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-RELATIVE-SPECTRAL-RESPONSE-LUT_j01_20161031000000Z_20180111000000Z_ee00000000000000Z_PS-1-O-CCR-17-3436-v003-FusedM9-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-RTA-ER-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-RVF-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-M16-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-SOLAR-IRAD-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-Thuillier2002-LE-PE_all-_all_all-_ops" # noqa
- ],
- [
- b"VIIRS-SDR-TELE-COEFFS-LUT_j01_20160331000000Z_20170807130000Z_ee00000000000000Z_PS-1-O-CCR-16-2859-v002-SideA-LE-PE_all-_all_all-_ops" # noqa
- ],
- ],
- dtype="|S151",
- ),
- "N_Beginning_Orbit_Number": np.array(
- [[10015]], dtype=np.uint64
- ),
- "N_Beginning_Time_IET": np.array(
- [[1950675122120971]], dtype=np.uint64
- ),
- "N_Creation_Date": np.array([[b"20191025"]], dtype="|S9"),
- "N_Creation_Time": np.array(
- [[b"062411.116253Z"]], dtype="|S15"
- ),
- "N_Day_Night_Flag": np.array([[b"Night"]], dtype="|S6"),
- "N_Ending_Time_IET": np.array(
- [[1950675204849492]], dtype=np.uint64
- ),
- "N_Graceful_Degradation": np.array([[b"No"]], dtype="|S3"),
- "N_Granule_ID": np.array(
- [[b"J01002526558865"]], dtype="|S16"
- ),
- "N_Granule_Status": np.array([[b"N/A"]], dtype="|S4"),
- "N_Granule_Version": np.array([[b"A1"]], dtype="|S3"),
- "N_IDPS_Mode": np.array([[b"N/A"]], dtype="|S4"),
- "N_Input_Prod": np.array(
- [
- [b"GEO-VIIRS-OBC-IP:J01002526558865:A1"],
- [b"SPACECRAFT-DIARY-RDR:J01002526558800:A1"],
- [b"SPACECRAFT-DIARY-RDR:J01002526559000:A1"],
- [b"VIIRS-DNB-GEO:J01002526558865:A1"],
- [b"VIIRS-IMG-RGEO-TC:J01002526558865:A1"],
- [b"VIIRS-MOD-RGEO-TC:J01002526558865:A1"],
- [b"VIIRS-SCIENCE-RDR:J01002526558012:A1"],
- [b"VIIRS-SCIENCE-RDR:J01002526558865:A1"],
- ],
- dtype="|S40",
- ),
- "N_JPSS_Document_Ref": np.array(
- [
- [
- b"474-00448-02-06_JPSS-DD-Vol-II-Part-6_0200H.pdf"
- ],
- [
- b"474-00448-02-06_JPSS-VIIRS-SDR-DD-Part-6_0200H_VIIRS-DNB-SDR-PP.xml"
- ],
- [
- b"474-00448-03-06_JPSS-OAD-Vol-III-Part-6-VIIRS-RDR-SDR_-1.pdf"
- ],
- ],
- dtype="|S68",
- ),
- "N_LEOA_Flag": np.array([[b"On"]], dtype="|S3"),
- "N_Nadir_Latitude_Max": np.array(
- [[45.3722]], dtype=np.float32
- ),
- "N_Nadir_Latitude_Min": np.array(
- [[40.6172]], dtype=np.float32
- ),
- "N_Nadir_Longitude_Max": np.array(
- [[-62.80047]], dtype=np.float32
- ),
- "N_Nadir_Longitude_Min": np.array(
- [[-64.51342]], dtype=np.float32
- ),
- "N_Number_Of_Scans": np.array([[47]], dtype=np.int32),
- "N_Percent_Erroneous_Data": np.array(
- [[0.0]], dtype=np.float32
- ),
- "N_Percent_Missing_Data": np.array(
- [[51.05127]], dtype=np.float32
- ),
- "N_Percent_Not-Applicable_Data": np.array(
- [[0.0]], dtype=np.float32
- ),
- "N_Primary_Label": np.array(
- [[b"Non-Primary"]], dtype="|S12"
- ),
- "N_Quality_Summary_Names": np.array(
- [
- [b"Scan Quality Exclusion"],
- [b"Summary VIIRS SDR Quality"],
- ],
- dtype="|S26",
- ),
- "N_Quality_Summary_Values": np.array(
- [[24], [49]], dtype=np.int32
- ),
- "N_RSB_Index": np.array([[17]], dtype=np.int32),
- "N_Reference_ID": np.array(
- [[b"VIIRS-DNB-SDR:J01002526558865:A1"]], dtype="|S33"
- ),
- "N_Satellite/Local_Azimuth_Angle_Max": np.array(
- [[179.9995]], dtype=np.float32
- ),
- "N_Satellite/Local_Azimuth_Angle_Min": np.array(
- [[-179.9976]], dtype=np.float32
- ),
- "N_Satellite/Local_Zenith_Angle_Max": np.array(
- [[69.83973]], dtype=np.float32
- ),
- "N_Satellite/Local_Zenith_Angle_Min": np.array(
- [[0.00898314]], dtype=np.float32
- ),
- "N_Software_Version": np.array(
- [[b"CSPP_SDR_3_1_3"]], dtype="|S15"
- ),
- "N_Solar_Azimuth_Angle_Max": np.array(
- [[73.93496]], dtype=np.float32
- ),
- "N_Solar_Azimuth_Angle_Min": np.array(
- [[23.83542]], dtype=np.float32
- ),
- "N_Solar_Zenith_Angle_Max": np.array(
- [[147.5895]], dtype=np.float32
- ),
- "N_Solar_Zenith_Angle_Min": np.array(
- [[126.3929]], dtype=np.float32
- ),
- "N_Spacecraft_Maneuver": np.array(
- [[b"Normal Operations"]], dtype="|S18"
- ),
- "North_Bounding_Coordinate": np.array(
- [[46.8018]], dtype=np.float32
- ),
- "South_Bounding_Coordinate": np.array(
- [[36.53402]], dtype=np.float32
- ),
- "West_Bounding_Coordinate": np.array(
- [[-82.65787]], dtype=np.float32
- ),
- }
- },
- "attrs": {
- "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"),
- "N_Collection_Short_Name": np.array(
- [[b"VIIRS-DNB-SDR"]], dtype="|S14"
+ "N_Quality_Summary_Values": np.array(
+ [[24], [49]], dtype=np.int32
+ ),
+ "N_RSB_Index": np.array([[17]], dtype=np.int32),
+ "N_Reference_ID": np.array(
+ [[b"VIIRS-DNB-SDR:J01002526558865:A1"]], dtype="|S33"
+ ),
+ "N_Satellite/Local_Azimuth_Angle_Max": np.array(
+ [[179.9995]], dtype=np.float32
+ ),
+ "N_Satellite/Local_Azimuth_Angle_Min": np.array(
+ [[-179.9976]], dtype=np.float32
+ ),
+ "N_Satellite/Local_Zenith_Angle_Max": np.array(
+ [[69.83973]], dtype=np.float32
+ ),
+ "N_Satellite/Local_Zenith_Angle_Min": np.array(
+ [[0.00898314]], dtype=np.float32
+ ),
+ "N_Software_Version": np.array(
+ [[b"CSPP_SDR_3_1_3"]], dtype="|S15"
),
- "N_Dataset_Type_Tag": np.array([[b"SDR"]], dtype="|S4"),
- "N_Instrument_Flight_SW_Version": np.array(
- [[20], [65534]], dtype=np.int32
+ "N_Solar_Azimuth_Angle_Max": np.array(
+ [[73.93496]], dtype=np.float32
),
- "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"),
- "Operational_Mode": np.array(
- [[b"J01 Normal Operations, VIIRS Operational"]],
- dtype="|S41",
+ "N_Solar_Azimuth_Angle_Min": np.array(
+ [[23.83542]], dtype=np.float32
),
- },
+ "N_Solar_Zenith_Angle_Max": np.array(
+ [[147.5895]], dtype=np.float32
+ ),
+ "N_Solar_Zenith_Angle_Min": np.array(
+ [[126.3929]], dtype=np.float32
+ ),
+ "N_Spacecraft_Maneuver": np.array(
+ [[b"Normal Operations"]], dtype="|S18"
+ ),
+ "North_Bounding_Coordinate": np.array(
+ [[46.8018]], dtype=np.float32
+ ),
+ "South_Bounding_Coordinate": np.array(
+ [[36.53402]], dtype=np.float32
+ ),
+ "West_Bounding_Coordinate": np.array(
+ [[-82.65787]], dtype=np.float32
+ ),
+ }
+ },
+ "attrs": {
+ "Instrument_Short_Name": np.array([[b"VIIRS"]], dtype="|S6"),
+ "N_Collection_Short_Name": np.array(
+ [[b"VIIRS-DNB-SDR"]], dtype="|S14"
+ ),
+ "N_Dataset_Type_Tag": np.array([[b"SDR"]], dtype="|S4"),
+ "N_Instrument_Flight_SW_Version": np.array(
+ [[20], [65534]], dtype=np.int32
+ ),
+ "N_Processing_Domain": np.array([[b"ops"]], dtype="|S4"),
+ "Operational_Mode": np.array(
+ [[b"J01 Normal Operations, VIIRS Operational"]],
+ dtype="|S41",
+ ),
},
},
- "attrs": {
- "CVIIRS_Version": np.array([[b"2.0.1"]], dtype="|S5"),
- "Compact_VIIRS_SDR_Version": np.array([[b"3.1"]], dtype="|S3"),
- "Distributor": np.array([[b"cspp"]], dtype="|S5"),
- "Mission_Name": np.array([[b"JPSS-1"]], dtype="|S7"),
- "N_Dataset_Source": np.array([[b"all-"]], dtype="|S5"),
- "N_GEO_Ref": np.array(
+ },
+ "attrs": {
+ "CVIIRS_Version": np.array([[b"2.0.1"]], dtype="|S5"),
+ "Compact_VIIRS_SDR_Version": np.array([[b"3.1"]], dtype="|S3"),
+ "Distributor": np.array([[b"cspp"]], dtype="|S5"),
+ "Mission_Name": np.array([[b"JPSS-1"]], dtype="|S7"),
+ "N_Dataset_Source": np.array([[b"all-"]], dtype="|S5"),
+ "N_GEO_Ref": np.array(
+ [
[
- [
- b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5"
- ]
- ],
- dtype="|S78",
- ),
- "N_HDF_Creation_Date": np.array([[b"20191025"]], dtype="|S8"),
- "N_HDF_Creation_Time": np.array([[b"062502.927000Z"]], dtype="|S14"),
- "Platform_Short_Name": np.array([[b"J01"]], dtype="|S4"),
- "Satellite_Id_Filename": np.array([[b"j01"]], dtype="|S3"),
- },
- }
- self.filename = os.path.join(
- tempfile.gettempdir(),
- "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5",
- )
- h5f = h5py.File(self.filename, mode="w")
+ b"GDNBO_j01_d20191025_t0611251_e0612478_b10015_c20191025062405837630_cspp_dev.h5"
+ ]
+ ],
+ dtype="|S78",
+ ),
+ "N_HDF_Creation_Date": np.array([[b"20191025"]], dtype="|S8"),
+ "N_HDF_Creation_Time": np.array([[b"062502.927000Z"]], dtype="|S14"),
+ "Platform_Short_Name": np.array([[b"J01"]], dtype="|S4"),
+ "Satellite_Id_Filename": np.array([[b"j01"]], dtype="|S3"),
+ },
+ }
+ return fake_dnb
+
+
- def fill_h5(root, stuff):
- for key, val in stuff.items():
- if key in ["value", "attrs"]:
- continue
- if "value" in val:
- root[key] = val["value"]
- else:
- grp = root.create_group(key)
- fill_h5(grp, stuff[key])
- if "attrs" in val:
- for attrs, val in val["attrs"].items():
- root[key].attrs[attrs] = val
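+# The fixture writes into pytest's tmp_path, so each test gets an isolated
+# file that is removed automatically; teardown no longer needs os.remove().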
+@pytest.fixture
+def fake_dnb_file(fake_dnb, tmp_path):
+ """Create an hdf5 file in viirs_compact format with DNB data in it."""
+ filename = tmp_path / "SVDNBC_j01_d20191025_t0611251_e0612478_b10015_c20191025062459000870_eum_ops.h5"
+ h5f = h5py.File(filename, mode="w")
- fill_h5(h5f, fake_dnb)
- for attr, val in fake_dnb["attrs"].items():
- h5f.attrs[attr] = val
- h5f.close()
+ fill_h5(h5f, fake_dnb)
+ for attr, val in fake_dnb["attrs"].items():
+ h5f.attrs[attr] = val
+ h5f.close()
+ return filename
+
+
+class TestCompact:
+ """Test class for reading compact viirs format."""
+
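+ # autouse runs this fixture for every test, mimicking unittest's setUp.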
+ @pytest.fixture(autouse=True)
+ def setup_method(self, fake_dnb_file):
+ """Create a fake file from scratch."""
+ self.filename = fake_dnb_file
self.client = None
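+ # self.client is closed in teardown_method if a test created one.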
def _dataset_iterator(self):
@@ -2462,10 +2459,10 @@ def _dataset_iterator(self):
def test_get_dataset(self):
"""Retrieve datasets from a DNB file."""
for ds in self._dataset_iterator():
- self.assertEqual(ds.shape, (752, 4064))
- self.assertEqual(ds.dtype, np.float32)
- self.assertEqual(ds.compute().shape, (752, 4064))
- self.assertEqual(ds.attrs['rows_per_scan'], 16)
+ assert ds.shape == (752, 4064)
+ assert ds.dtype == np.float32
+ assert ds.compute().shape == (752, 4064)
+ assert ds.attrs['rows_per_scan'] == 16
def test_distributed(self):
"""Check that distributed computations work."""
@@ -2474,11 +2471,9 @@ def test_distributed(self):
for ds in self._dataset_iterator():
# Check that the computation is running fine.
- self.assertEqual(ds.compute().shape, (752, 4064))
+ assert ds.compute().shape == (752, 4064)
- def tearDown(self):
+ def teardown_method(self):
"""Destroy."""
- with suppress(OSError):
- os.remove(self.filename)
with suppress(AttributeError):
self.client.close()
diff --git a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py
index 9a1045b81f..df94283fba 100644
--- a/satpy/tests/reader_tests/test_viirs_edr_active_fires.py
+++ b/satpy/tests/reader_tests/test_viirs_edr_active_fires.py
@@ -22,18 +22,19 @@
"""
+import io
import os
import unittest
from unittest import mock
-import numpy as np
-import io
+
import dask.dataframe as dd
+import numpy as np
import pandas as pd
-from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
+
from satpy.readers.file_handlers import BaseFileHandler
+from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
-
DEFAULT_FILE_SHAPE = (1, 100)
DEFAULT_LATLON_FILE_DTYPE = np.float32
@@ -60,8 +61,8 @@ def get_test_content(self, filename, filename_info, filename_type):
"""Mimic reader input file content."""
file_content = {}
file_content['/attr/data_id'] = "AFMOD"
- file_content['satellite_name'] = "npp"
- file_content['sensor'] = 'VIIRS'
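+ # Keys with the /attr/ prefix become file-level attributes in the fake handler.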
+ file_content['/attr/satellite_name'] = "NPP"
+ file_content['/attr/instrument_name'] = 'VIIRS'
file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA
file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA
@@ -86,15 +87,15 @@ def get_test_content(self, filename, filename_info, filename_type):
"""Mimic reader input file content."""
file_content = {}
file_content['/attr/data_id'] = "AFIMG"
- file_content['satellite_name'] = "npp"
- file_content['sensor'] = 'VIIRS'
+ file_content['/attr/satellite_name'] = "NPP"
+ file_content['/attr/instrument_name'] = 'VIIRS'
- file_content['FP_latitude'] = DEFAULT_LATLON_FILE_DATA
- file_content['FP_longitude'] = DEFAULT_LATLON_FILE_DATA
- file_content['FP_power'] = DEFAULT_POWER_FILE_DATA
- file_content['FP_T4'] = DEFAULT_M13_FILE_DATA
- file_content['FP_T4/attr/units'] = 'kelvins'
- file_content['FP_confidence'] = DEFAULT_DETECTION_FILE_DATA
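+ # Datasets now live under the 'Fire Pixels' group, as in the AFMOD handler above.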
+ file_content['Fire Pixels/FP_latitude'] = DEFAULT_LATLON_FILE_DATA
+ file_content['Fire Pixels/FP_longitude'] = DEFAULT_LATLON_FILE_DATA
+ file_content['Fire Pixels/FP_power'] = DEFAULT_POWER_FILE_DATA
+ file_content['Fire Pixels/FP_T4'] = DEFAULT_M13_FILE_DATA
+ file_content['Fire Pixels/FP_T4/attr/units'] = 'kelvins'
+ file_content['Fire Pixels/FP_confidence'] = DEFAULT_DETECTION_FILE_DATA
attrs = ('FP_latitude', 'FP_longitude', 'FP_T13', 'FP_confidence')
convert_file_content_to_data_array(
@@ -207,7 +208,7 @@ def test_load_dataset(self):
for v in datasets.values():
self.assertEqual(v.attrs['units'], 'MW')
self.assertEqual(v.attrs['platform_name'], 'NOAA-21')
- self.assertEqual(v.attrs['sensor'], 'VIIRS')
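+ # Sensor names are now expected in lowercase.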
+ self.assertEqual(v.attrs['sensor'], 'viirs')
class TestImgVIIRSActiveFiresNetCDF4(unittest.TestCase):
@@ -264,7 +265,7 @@ def test_load_dataset(self):
for v in datasets.values():
self.assertEqual(v.attrs['units'], 'MW')
self.assertEqual(v.attrs['platform_name'], 'Suomi-NPP')
- self.assertEqual(v.attrs['sensor'], 'VIIRS')
+ self.assertEqual(v.attrs['sensor'], 'viirs')
@mock.patch('satpy.readers.viirs_edr_active_fires.dd.read_csv')
diff --git a/satpy/tests/reader_tests/test_viirs_edr_flood.py b/satpy/tests/reader_tests/test_viirs_edr_flood.py
index 21d1c5f0e1..9b544dc9f1 100644
--- a/satpy/tests/reader_tests/test_viirs_edr_flood.py
+++ b/satpy/tests/reader_tests/test_viirs_edr_flood.py
@@ -19,9 +19,10 @@
import os
import unittest
from unittest import mock
+
import numpy as np
-from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
+
+from satpy.tests.reader_tests.test_hdf4_utils import FakeHDF4FileHandler
DEFAULT_FILE_DTYPE = np.uint16
DEFAULT_FILE_SHAPE = (10, 300)
diff --git a/satpy/tests/reader_tests/test_viirs_l1b.py b/satpy/tests/reader_tests/test_viirs_l1b.py
index 3386f3b686..0d3b2ad1b9 100644
--- a/satpy/tests/reader_tests/test_viirs_l1b.py
+++ b/satpy/tests/reader_tests/test_viirs_l1b.py
@@ -18,10 +18,11 @@
"""Module for testing the satpy.readers.viirs_l1b module."""
import os
-import unittest
-from unittest import mock
from datetime import datetime, timedelta
+from unittest import mock
+
import numpy as np
+
from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
from satpy.tests.utils import convert_file_content_to_data_array
@@ -36,25 +37,20 @@
DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
-class FakeNetCDF4FileHandler2(FakeNetCDF4FileHandler):
+class FakeNetCDF4FileHandlerDay(FakeNetCDF4FileHandler):
"""Swap-in NetCDF4 File Handler."""
+ M_REFL_BANDS = [f"M{band_num:02d}" for band_num in range(1, 12)]
+ M_BT_BANDS = [f"M{band_num:02d}" for band_num in range(12, 17)]
+ M_BANDS = M_REFL_BANDS + M_BT_BANDS
+ I_REFL_BANDS = [f"I{band_num:02d}" for band_num in range(1, 4)]
+ I_BT_BANDS = [f"I{band_num:02d}" for band_num in range(4, 6)]
+ I_BANDS = I_REFL_BANDS + I_BT_BANDS
+
def get_test_content(self, filename, filename_info, filetype_info):
"""Mimic reader input file content."""
dt = filename_info.get('start_time', datetime(2016, 1, 1, 12, 0, 0))
file_type = filename[:5].lower()
- # num_lines = {
- # 'vl1bi': 3248 * 2,
- # 'vl1bm': 3248,
- # 'vl1bd': 3248,
- # }[file_type]
- # num_pixels = {
- # 'vl1bi': 6400,
- # 'vl1bm': 3200,
- # 'vl1bd': 4064,
- # }[file_type]
- # num_scans = 203
- # num_luts = 65536
num_lines = DEFAULT_FILE_SHAPE[0]
num_pixels = DEFAULT_FILE_SHAPE[1]
num_scans = 5
@@ -67,9 +63,16 @@ def get_test_content(self, filename, filename_info, filetype_info):
'/attr/time_coverage_start': dt.strftime('%Y-%m-%dT%H:%M:%S.000Z'),
'/attr/time_coverage_end': (dt + timedelta(minutes=6)).strftime('%Y-%m-%dT%H:%M:%S.000Z'),
'/attr/orbit_number': 26384,
- '/attr/instrument': 'viirs',
+ '/attr/instrument': 'VIIRS',
'/attr/platform': 'Suomi-NPP',
}
+ self._fill_contents_with_default_data(file_content, file_type)
+ self._set_dataset_specific_metadata(file_content)
+ convert_file_content_to_data_array(file_content)
+ return file_content
+
+ def _fill_contents_with_default_data(self, file_content, file_type):
+ """Fill file contents with default data."""
if file_type.startswith('vgeo'):
file_content['/attr/OrbitNumber'] = file_content.pop('/attr/orbit_number')
file_content['geolocation_data/latitude'] = DEFAULT_LAT_DATA
@@ -82,32 +85,18 @@ def get_test_content(self, filename, filename_info, filetype_info):
file_content['geolocation_data/lunar_zenith'] = DEFAULT_LON_DATA
file_content['geolocation_data/lunar_azimuth'] = DEFAULT_LON_DATA
elif file_type == 'vl1bm':
- file_content['observation_data/M01'] = DEFAULT_FILE_DATA
- file_content['observation_data/M02'] = DEFAULT_FILE_DATA
- file_content['observation_data/M03'] = DEFAULT_FILE_DATA
- file_content['observation_data/M04'] = DEFAULT_FILE_DATA
- file_content['observation_data/M05'] = DEFAULT_FILE_DATA
- file_content['observation_data/M06'] = DEFAULT_FILE_DATA
- file_content['observation_data/M07'] = DEFAULT_FILE_DATA
- file_content['observation_data/M08'] = DEFAULT_FILE_DATA
- file_content['observation_data/M09'] = DEFAULT_FILE_DATA
- file_content['observation_data/M10'] = DEFAULT_FILE_DATA
- file_content['observation_data/M11'] = DEFAULT_FILE_DATA
- file_content['observation_data/M12'] = DEFAULT_FILE_DATA
- file_content['observation_data/M13'] = DEFAULT_FILE_DATA
- file_content['observation_data/M14'] = DEFAULT_FILE_DATA
- file_content['observation_data/M15'] = DEFAULT_FILE_DATA
- file_content['observation_data/M16'] = DEFAULT_FILE_DATA
+ for m_band in self.M_BANDS:
+ file_content[f'observation_data/{m_band}'] = DEFAULT_FILE_DATA
elif file_type == 'vl1bi':
- file_content['observation_data/I01'] = DEFAULT_FILE_DATA
- file_content['observation_data/I02'] = DEFAULT_FILE_DATA
- file_content['observation_data/I03'] = DEFAULT_FILE_DATA
- file_content['observation_data/I04'] = DEFAULT_FILE_DATA
- file_content['observation_data/I05'] = DEFAULT_FILE_DATA
+ for i_band in self.I_BANDS:
+ file_content[f'observation_data/{i_band}'] = DEFAULT_FILE_DATA
elif file_type == 'vl1bd':
file_content['observation_data/DNB_observations'] = DEFAULT_FILE_DATA
file_content['observation_data/DNB_observations/attr/units'] = 'Watts/cm^2/steradian'
+ @staticmethod
+ def _set_dataset_specific_metadata(file_content):
+ """Set dataset-specific metadata."""
for k in list(file_content.keys()):
if not k.startswith('observation_data') and not k.startswith('geolocation_data'):
continue
@@ -136,26 +125,37 @@ def get_test_content(self, filename, filename_info, filetype_info):
file_content[k + '/attr/scale_factor'] = 1.1
file_content[k + '/attr/add_offset'] = 0.1
- convert_file_content_to_data_array(file_content)
- return file_content
+class FakeNetCDF4FileHandlerNight(FakeNetCDF4FileHandlerDay):
+ """Same as the day file handler, but some day-only bands are missing.
-class TestVIIRSL1BReader(unittest.TestCase):
+    This matches what happens in real-world files, where reflectance bands
+    are removed from night data to save space.
+
+ """
+
+ M_BANDS = FakeNetCDF4FileHandlerDay.M_BT_BANDS
+ I_BANDS = FakeNetCDF4FileHandlerDay.I_BT_BANDS
+
+
+class TestVIIRSL1BReaderDay:
"""Test VIIRS L1B Reader."""
yaml_file = "viirs_l1b.yaml"
+ fake_cls = FakeNetCDF4FileHandlerDay
+ has_reflectance_bands = True
- def setUp(self):
+ def setup_method(self):
"""Wrap NetCDF4 file handler with our own fake handler."""
from satpy._config import config_search_paths
from satpy.readers.viirs_l1b import VIIRSL1BFileHandler
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
- self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (FakeNetCDF4FileHandler2,))
+ self.p = mock.patch.object(VIIRSL1BFileHandler, '__bases__', (self.fake_cls,))
self.fake_handler = self.p.start()
self.p.is_local = True
- def tearDown(self):
+ def teardown_method(self):
"""Stop wrapping the NetCDF4 file handler."""
self.p.stop()
@@ -166,10 +166,24 @@ def test_init(self):
loadables = r.select_files_from_pathnames([
'VL1BM_snpp_d20161130_t012400_c20161130054822.nc',
])
- self.assertEqual(len(loadables), 1)
+ assert len(loadables) == 1
r.create_filehandlers(loadables)
# make sure we have some files
- self.assertTrue(r.file_handlers)
+ assert r.file_handlers
+
+ def test_available_datasets_m_bands(self):
+ """Test available datasets for M band files."""
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc',
+ 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc',
+ ])
+ r.create_filehandlers(loadables)
+ avail_names = r.available_dataset_names
+ angles = {"satellite_azimuth_angle", "satellite_zenith_angle", "solar_azimuth_angle", "solar_zenith_angle"}
+ geo = {"m_lon", "m_lat"}
+ assert set(avail_names) == set(self.fake_cls.M_BANDS) | angles | geo
def test_load_every_m_band_bt(self):
"""Test loading all M band brightness temperatures."""
@@ -185,13 +199,14 @@ def test_load_every_m_band_bt(self):
'M14',
'M15',
'M16'])
- self.assertEqual(len(datasets), 5)
+ assert len(datasets) == 5
for v in datasets.values():
- self.assertEqual(v.attrs['calibration'], 'brightness_temperature')
- self.assertEqual(v.attrs['units'], 'K')
- self.assertEqual(v.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2)
+ assert v.attrs['calibration'] == 'brightness_temperature'
+ assert v.attrs['units'] == 'K'
+ assert v.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2
+ assert v.attrs['sensor'] == "viirs"
def test_load_every_m_band_refl(self):
"""Test loading all M band reflectances."""
@@ -213,13 +228,14 @@ def test_load_every_m_band_refl(self):
'M09',
'M10',
'M11'])
- self.assertEqual(len(datasets), 11)
+ assert len(datasets) == (11 if self.has_reflectance_bands else 0)
for v in datasets.values():
- self.assertEqual(v.attrs['calibration'], 'reflectance')
- self.assertEqual(v.attrs['units'], '%')
- self.assertEqual(v.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2)
+ assert v.attrs['calibration'] == 'reflectance'
+ assert v.attrs['units'] == '%'
+ assert v.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2
+ assert v.attrs['sensor'] == "viirs"
def test_load_every_m_band_rad(self):
"""Test loading all M bands as radiances."""
@@ -247,13 +263,37 @@ def test_load_every_m_band_rad(self):
make_dataid(name='M14', calibration='radiance'),
make_dataid(name='M15', calibration='radiance'),
make_dataid(name='M16', calibration='radiance')])
- self.assertEqual(len(datasets), 16)
+ assert len(datasets) == (16 if self.has_reflectance_bands else 5)
for v in datasets.values():
- self.assertEqual(v.attrs['calibration'], 'radiance')
- self.assertEqual(v.attrs['units'], 'W m-2 um-1 sr-1')
- self.assertEqual(v.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2)
+ assert v.attrs['calibration'] == 'radiance'
+ assert v.attrs['units'] == 'W m-2 um-1 sr-1'
+ assert v.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2
+ assert v.attrs['sensor'] == "viirs"
+
+ def test_load_i_band_angles(self):
+ """Test loading all M bands as radiances."""
+ from satpy.readers import load_reader
+ from satpy.tests.utils import make_dataid
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ 'VL1BI_snpp_d20161130_t012400_c20161130054822.nc',
+ 'VL1BM_snpp_d20161130_t012400_c20161130054822.nc',
+ 'VGEOI_snpp_d20161130_t012400_c20161130054822.nc',
+ 'VGEOM_snpp_d20161130_t012400_c20161130054822.nc',
+ ])
+ r.create_filehandlers(loadables)
+ datasets = r.load([
+ make_dataid(name='satellite_zenith_angle'),
+ make_dataid(name='satellite_azimuth_angle'),
+ make_dataid(name='solar_azimuth_angle'),
+ make_dataid(name='solar_zenith_angle'),
+ ])
+ assert len(datasets) == 4
+ for v in datasets.values():
+ assert v.attrs['resolution'] == 371
+ assert v.attrs['sensor'] == "viirs"
def test_load_dnb_radiance(self):
"""Test loading the main DNB dataset."""
@@ -265,13 +305,14 @@ def test_load_dnb_radiance(self):
])
r.create_filehandlers(loadables)
datasets = r.load(['DNB'])
- self.assertEqual(len(datasets), 1)
+ assert len(datasets) == 1
for v in datasets.values():
- self.assertEqual(v.attrs['calibration'], 'radiance')
- self.assertEqual(v.attrs['units'], 'W m-2 sr-1')
- self.assertEqual(v.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2)
+ assert v.attrs['calibration'] == 'radiance'
+ assert v.attrs['units'] == 'W m-2 sr-1'
+ assert v.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2
+ assert v.attrs['sensor'] == "viirs"
def test_load_dnb_angles(self):
"""Test loading all DNB angle datasets."""
@@ -289,9 +330,21 @@ def test_load_dnb_angles(self):
'dnb_lunar_zenith_angle',
'dnb_lunar_azimuth_angle',
])
- self.assertEqual(len(datasets), 6)
+ assert len(datasets) == 6
for v in datasets.values():
- self.assertEqual(v.attrs['units'], 'degrees')
- self.assertEqual(v.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lons.attrs['rows_per_scan'], 2)
- self.assertEqual(v.attrs['area'].lats.attrs['rows_per_scan'], 2)
+ assert v.attrs['units'] == 'degrees'
+ assert v.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lons.attrs['rows_per_scan'] == 2
+ assert v.attrs['area'].lats.attrs['rows_per_scan'] == 2
+ assert v.attrs['sensor'] == "viirs"
+
+
+class TestVIIRSL1BReaderDayNight(TestVIIRSL1BReaderDay):
+ """Test VIIRS L1b with night data.
+
+ Night data files don't have reflectance bands in them.
+
+ """
+
+ fake_cls = FakeNetCDF4FileHandlerNight
+ has_reflectance_bands = False
diff --git a/satpy/tests/reader_tests/test_viirs_sdr.py b/satpy/tests/reader_tests/test_viirs_sdr.py
index 5224e5009a..4a09e132b3 100644
--- a/satpy/tests/reader_tests/test_viirs_sdr.py
+++ b/satpy/tests/reader_tests/test_viirs_sdr.py
@@ -19,19 +19,19 @@
import os
import unittest
+from contextlib import contextmanager
from unittest import mock
+
import numpy as np
+
from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
DEFAULT_FILE_DTYPE = np.uint16
-DEFAULT_FILE_SHAPE = (10, 300)
+DEFAULT_FILE_SHAPE = (32, 300)
+# Mimicking one scan line of data
DEFAULT_FILE_DATA = np.arange(DEFAULT_FILE_SHAPE[0] * DEFAULT_FILE_SHAPE[1],
dtype=DEFAULT_FILE_DTYPE).reshape(DEFAULT_FILE_SHAPE)
DEFAULT_FILE_FACTORS = np.array([2.0, 1.0], dtype=np.float32)
-DEFAULT_LAT_DATA = np.linspace(45, 65, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
-DEFAULT_LAT_DATA = np.repeat([DEFAULT_LAT_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
-DEFAULT_LON_DATA = np.linspace(5, 45, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
-DEFAULT_LON_DATA = np.repeat([DEFAULT_LON_DATA], DEFAULT_FILE_SHAPE[0], axis=0)
DATASET_KEYS = {'GDNBO': 'VIIRS-DNB-GEO',
'SVDNB': 'VIIRS-DNB-SDR',
@@ -67,6 +67,7 @@ class FakeHDF5FileHandler2(FakeHDF5FileHandler):
"""Swap-in HDF5 File Handler."""
_num_test_granules = 1
+ _num_scans_per_gran = [48]
def __init__(self, filename, filename_info, filetype_info, include_factors=True):
"""Create fake file handler."""
@@ -74,7 +75,7 @@ def __init__(self, filename, filename_info, filetype_info, include_factors=True)
super(FakeHDF5FileHandler2, self).__init__(filename, filename_info, filetype_info)
@staticmethod
- def _add_basic_metadata_to_file_content(file_content, filename_info):
+ def _add_basic_metadata_to_file_content(file_content, filename_info, num_grans):
start_time = filename_info['start_time']
end_time = filename_info['end_time'].replace(year=start_time.year,
month=start_time.month,
@@ -84,7 +85,7 @@ def _add_basic_metadata_to_file_content(file_content, filename_info):
ending_date = end_time.strftime('%Y%m%d')
ending_time = end_time.strftime('%H%M%S.%fZ')
new_file_content = {
- "{prefix2}/attr/AggregateNumberGranules": 1,
+ "{prefix2}/attr/AggregateNumberGranules": num_grans,
"{prefix2}/attr/AggregateBeginningDate": begin_date,
"{prefix2}/attr/AggregateBeginningTime": begin_time,
"{prefix2}/attr/AggregateEndingDate": ending_date,
@@ -98,71 +99,84 @@ def _add_basic_metadata_to_file_content(file_content, filename_info):
}
file_content.update(new_file_content)
- @staticmethod
def _add_granule_specific_info_to_file_content(
- file_content, dataset_group, num_granules, gran_group_prefix):
- lats_lists = [
+ self,
+ file_content, dataset_group, num_granules, num_scans_per_granule,
+ gran_group_prefix):
+ lons_lists = self._get_per_granule_lons()
+ lats_lists = self._get_per_granule_lats()
+ file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules)
+ for granule_idx in range(num_granules):
+ prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix,
+ dataset_group=dataset_group,
+ idx=granule_idx)
+ num_scans = num_scans_per_granule[granule_idx]
+ file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = num_scans
+ file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx]
+ file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx]
+
+ @staticmethod
+ def _get_per_granule_lons():
+ return [
np.array(
[
- 67.969505, 65.545685, 63.103046, 61.853905, 55.169273,
- 57.062447, 58.86063, 66.495514
+ 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385,
+ -7.05221, -10.405702, 14.638646
],
dtype=np.float32),
np.array(
[
- 72.74879, 70.2493, 67.84738, 66.49691, 58.77254,
- 60.465942, 62.11525, 71.08249
+ 53.52594, 51.685738, 50.439102, 14.629087, -10.247547,
+ -13.951393, -18.256989, 8.36572
],
dtype=np.float32),
np.array(
[
- 77.393425, 74.977875, 72.62976, 71.083435, 62.036346,
- 63.465122, 64.78075, 75.36842
+ 59.386833, 55.770416, 53.38952, 8.353765, -18.062435,
+ -22.608992, -27.867302, -1.3537619
],
dtype=np.float32),
np.array(
[
- 81.67615, 79.49934, 77.278656, 75.369415, 64.72178,
- 65.78417, 66.66166, 79.00025
+ 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953,
+ -33.091743, -39.28113, -17.749891
],
- dtype=np.float32),
+ dtype=np.float32)
]
- lons_lists = [
+
+ @staticmethod
+ def _get_per_granule_lats():
+ return [
np.array(
[
- 50.51393, 49.566296, 48.865967, 18.96082, -4.0238385,
- -7.05221, -10.405702, 14.638646
+ 67.969505, 65.545685, 63.103046, 61.853905, 55.169273,
+ 57.062447, 58.86063, 66.495514
],
dtype=np.float32),
np.array(
[
- 53.52594, 51.685738, 50.439102, 14.629087, -10.247547,
- -13.951393, -18.256989, 8.36572
+ 72.74879, 70.2493, 67.84738, 66.49691, 58.77254,
+ 60.465942, 62.11525, 71.08249
],
dtype=np.float32),
np.array(
[
- 59.386833, 55.770416, 53.38952, 8.353765, -18.062435,
- -22.608992, -27.867302, -1.3537619
+ 77.393425, 74.977875, 72.62976, 71.083435, 62.036346,
+ 63.465122, 64.78075, 75.36842
],
dtype=np.float32),
np.array(
[
- 72.50243, 64.17125, 59.15234, -1.3654504, -27.620953,
- -33.091743, -39.28113, -17.749891
+ 81.67615, 79.49934, 77.278656, 75.369415, 64.72178,
+ 65.78417, 66.66166, 79.00025
],
- dtype=np.float32)
+ dtype=np.float32),
]
- file_content["{prefix3}/NumberOfScans"] = np.array([48] * num_granules)
- for granule_idx in range(num_granules):
- prefix_gran = '{prefix}/{dataset_group}_Gran_{idx}'.format(prefix=gran_group_prefix,
- dataset_group=dataset_group,
- idx=granule_idx)
- file_content[prefix_gran + '/attr/N_Number_Of_Scans'] = 48
- file_content[prefix_gran + '/attr/G-Ring_Longitude'] = lons_lists[granule_idx]
- file_content[prefix_gran + '/attr/G-Ring_Latitude'] = lats_lists[granule_idx]
- def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix):
+ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix, num_grans):
+        # SDR files always produce data with 48 scans per granule even if there are fewer
+ total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans
+ new_shape = (total_rows, DEFAULT_FILE_SHAPE[1])
if filename[2:5] in ['M{:02d}'.format(x) for x in range(12)] + ['I01', 'I02', 'I03']:
keys = ['Radiance', 'Reflectance']
elif filename[2:5] in ['M{:02d}'.format(x) for x in range(12, 17)] + ['I04', 'I05']:
@@ -173,13 +187,17 @@ def _add_data_info_to_file_content(self, file_content, filename, data_var_prefix
for k in keys:
k = data_var_prefix + "/" + k
- file_content[k] = DEFAULT_FILE_DATA.copy()
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ file_content[k] = np.repeat(DEFAULT_FILE_DATA.copy(), 48 * num_grans, axis=0)
+ file_content[k + "/shape"] = new_shape
if self.include_factors:
- file_content[k + "Factors"] = DEFAULT_FILE_FACTORS.copy()
+ file_content[k + "Factors"] = np.repeat(
+ DEFAULT_FILE_FACTORS.copy()[None, :], num_grans, axis=0).ravel()
@staticmethod
- def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefix):
+ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefix, num_grans):
+        # SDR files always produce data with 48 scans per granule even if there are fewer
+ total_rows = DEFAULT_FILE_SHAPE[0] * 48 * num_grans
+ new_shape = (total_rows, DEFAULT_FILE_SHAPE[1])
is_dnb = filename[:5] not in ['GMODO', 'GIMGO']
if not is_dnb:
lon_data = np.linspace(15, 55, DEFAULT_FILE_SHAPE[1]).astype(DEFAULT_FILE_DTYPE)
@@ -191,13 +209,13 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi
for k in ["Latitude"]:
k = data_var_prefix + "/" + k
file_content[k] = lat_data
- file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ file_content[k] = np.repeat([file_content[k]], total_rows, axis=0)
+ file_content[k + "/shape"] = new_shape
for k in ["Longitude"]:
k = data_var_prefix + "/" + k
file_content[k] = lon_data
- file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ file_content[k] = np.repeat([file_content[k]], total_rows, axis=0)
+ file_content[k + "/shape"] = new_shape
angles = ['SolarZenithAngle',
'SolarAzimuthAngle',
@@ -208,8 +226,8 @@ def _add_geolocation_info_to_file_content(file_content, filename, data_var_prefi
for k in angles:
k = data_var_prefix + "/" + k
file_content[k] = lon_data # close enough to SZA
- file_content[k] = np.repeat([file_content[k]], DEFAULT_FILE_SHAPE[0], axis=0)
- file_content[k + "/shape"] = DEFAULT_FILE_SHAPE
+ file_content[k] = np.repeat([file_content[k]], total_rows, axis=0)
+ file_content[k + "/shape"] = new_shape
@staticmethod
def _add_geo_ref(file_content, filename):
@@ -224,8 +242,8 @@ def _add_geo_ref(file_content, filename):
@staticmethod
def _convert_numpy_content_to_dataarray(final_content):
- from xarray import DataArray
import dask.array as da
+ from xarray import DataArray
for key, val in final_content.items():
if isinstance(val, np.ndarray):
val = da.from_array(val, chunks=val.shape)
@@ -244,23 +262,43 @@ def get_test_content(self, filename, filename_info, filetype_info):
prefix3 = 'All_Data/{dataset_group}_All'.format(dataset_group=dataset_group)
file_content = {}
- self._add_basic_metadata_to_file_content(file_content, filename_info)
+ self._add_basic_metadata_to_file_content(file_content, filename_info, self._num_test_granules)
self._add_granule_specific_info_to_file_content(file_content, dataset_group,
- self._num_test_granules, prefix1)
+ self._num_test_granules, self._num_scans_per_gran,
+ prefix1)
self._add_geo_ref(file_content, filename)
for k, v in list(file_content.items()):
file_content[k.format(prefix1=prefix1, prefix2=prefix2, prefix3=prefix3)] = v
if filename[:3] in ['SVM', 'SVI', 'SVD']:
- self._add_data_info_to_file_content(file_content, filename, prefix3)
+ self._add_data_info_to_file_content(file_content, filename, prefix3,
+ self._num_test_granules)
elif filename[0] == 'G':
- self._add_geolocation_info_to_file_content(file_content, filename, prefix3)
+ self._add_geolocation_info_to_file_content(file_content, filename, prefix3,
+ self._num_test_granules)
final_content.update(file_content)
self._convert_numpy_content_to_dataarray(final_content)
return final_content
+@contextmanager
+def touch_geo_files(*prefixes):
+ """Create and then remove VIIRS SDR geolocation files."""
+ geofiles = [_touch_geo_file(prefix) for prefix in prefixes]
+ try:
+ yield geofiles
+ finally:
+ for filename in geofiles:
+ os.remove(filename)
+
+
+def _touch_geo_file(prefix):
+ geo_fn = prefix + '_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
+    open(geo_fn, 'w').close()  # create (and immediately close) an empty placeholder file
+ return geo_fn
+
+
class TestVIIRSSDRReader(unittest.TestCase):
"""Test VIIRS SDR Reader."""
@@ -274,6 +312,7 @@ def _assert_reflectance_properties(self, data_arr, num_scans=16, with_area=True)
if with_area:
self.assertIn('area', data_arr.attrs)
self.assertIsNotNone(data_arr.attrs['area'])
+ self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape)
else:
self.assertNotIn('area', data_arr.attrs)
@@ -285,6 +324,7 @@ def _assert_bt_properties(self, data_arr, num_scans=16, with_area=True):
if with_area:
self.assertIn('area', data_arr.attrs)
self.assertIsNotNone(data_arr.attrs['area'])
+ self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape)
else:
self.assertNotIn('area', data_arr.attrs)
@@ -296,6 +336,7 @@ def _assert_dnb_radiance_properties(self, data_arr, with_area=True):
if with_area:
self.assertIn('area', data_arr.attrs)
self.assertIsNotNone(data_arr.attrs['area'])
+ self.assertEqual(data_arr.attrs['area'].shape, data_arr.shape)
else:
self.assertNotIn('area', data_arr.attrs)
@@ -327,8 +368,9 @@ def test_init(self):
def test_init_start_time_beyond(self):
"""Test basic init with start_time after the provided files."""
- from satpy.readers import load_reader
from datetime import datetime
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs,
filter_parameters={
'start_time': datetime(2012, 2, 26)
@@ -340,8 +382,9 @@ def test_init_start_time_beyond(self):
def test_init_end_time_beyond(self):
"""Test basic init with end_time before the provided files."""
- from satpy.readers import load_reader
from datetime import datetime
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs,
filter_parameters={
'end_time': datetime(2012, 2, 24)
@@ -353,8 +396,9 @@ def test_init_end_time_beyond(self):
def test_init_start_end_time(self):
"""Test basic init with end_time before the provided files."""
- from satpy.readers import load_reader
from datetime import datetime
+
+ from satpy.readers import load_reader
r = load_reader(self.reader_configs,
filter_parameters={
'start_time': datetime(2012, 2, 24),
@@ -419,11 +463,7 @@ def test_load_all_m_reflectances_find_geo(self):
'SVM10_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
])
- # make a fake geo file
- geo_fn = 'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
- open(geo_fn, 'w')
-
- try:
+ with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2):
r.create_filehandlers(loadables)
ds = r.load(['M01',
'M02',
@@ -437,8 +477,6 @@ def test_load_all_m_reflectances_find_geo(self):
'M10',
'M11',
])
- finally:
- os.remove(geo_fn)
self.assertEqual(len(ds), 11)
for d in ds.values():
@@ -462,19 +500,20 @@ def test_load_all_m_reflectances_provided_geo(self):
'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
])
- r.create_filehandlers(loadables)
- ds = r.load(['M01',
- 'M02',
- 'M03',
- 'M04',
- 'M05',
- 'M06',
- 'M07',
- 'M08',
- 'M09',
- 'M10',
- 'M11',
- ])
+ with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2):
+ r.create_filehandlers(loadables)
+ ds = r.load(['M01',
+ 'M02',
+ 'M03',
+ 'M04',
+ 'M05',
+ 'M06',
+ 'M07',
+ 'M08',
+ 'M09',
+ 'M10',
+ 'M11',
+ ])
self.assertEqual(len(ds), 11)
for d in ds.values():
self._assert_reflectance_properties(d, with_area=True)
@@ -502,19 +541,20 @@ def test_load_all_m_reflectances_use_nontc(self):
'GMTCO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
])
- r.create_filehandlers(loadables, {'use_tc': False})
- ds = r.load(['M01',
- 'M02',
- 'M03',
- 'M04',
- 'M05',
- 'M06',
- 'M07',
- 'M08',
- 'M09',
- 'M10',
- 'M11',
- ])
+ with touch_geo_files("GMTCO", "GMODO") as (geo_fn1, geo_fn2):
+ r.create_filehandlers(loadables, {'use_tc': False})
+ ds = r.load(['M01',
+ 'M02',
+ 'M03',
+ 'M04',
+ 'M05',
+ 'M06',
+ 'M07',
+ 'M08',
+ 'M09',
+ 'M10',
+ 'M11',
+ ])
self.assertEqual(len(ds), 11)
for d in ds.values():
self._assert_reflectance_properties(d, with_area=True)
@@ -541,19 +581,20 @@ def test_load_all_m_reflectances_use_nontc2(self):
'SVM11_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
'GMODO_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
])
- r.create_filehandlers(loadables, {'use_tc': None})
- ds = r.load(['M01',
- 'M02',
- 'M03',
- 'M04',
- 'M05',
- 'M06',
- 'M07',
- 'M08',
- 'M09',
- 'M10',
- 'M11',
- ])
+ with touch_geo_files("GMODO") as (geo_fn2,):
+ r.create_filehandlers(loadables, {'use_tc': None})
+ ds = r.load(['M01',
+ 'M02',
+ 'M03',
+ 'M04',
+ 'M05',
+ 'M06',
+ 'M07',
+ 'M08',
+ 'M09',
+ 'M10',
+ 'M11',
+ ])
self.assertEqual(len(ds), 11)
for d in ds.values():
self._assert_reflectance_properties(d, with_area=True)
@@ -798,6 +839,7 @@ class FakeHDF5FileHandlerAggr(FakeHDF5FileHandler2):
"""Swap-in HDF5 File Handler with 4 VIIRS Granules per file."""
_num_test_granules = 4
+ _num_scans_per_gran = [48] * 4
class TestAggrVIIRSSDRReader(unittest.TestCase):
@@ -841,3 +883,44 @@ def test_bounding_box(self):
lons, lats = r.file_handlers['generic_file'][0].get_bounding_box()
np.testing.assert_allclose(lons, expected_lons)
np.testing.assert_allclose(lats, expected_lats)
+
+
+class FakeShortHDF5FileHandlerAggr(FakeHDF5FileHandler2):
+ """Fake file that has less scans than usual in a couple granules."""
+
+ _num_test_granules = 3
+ _num_scans_per_gran = [47, 48, 47]
+
+
+class TestShortAggrVIIRSSDRReader(unittest.TestCase):
+ """Test VIIRS SDR Reader with a file that has truncated granules."""
+
+ yaml_file = "viirs_sdr.yaml"
+
+ def setUp(self):
+ """Wrap HDF5 file handler with our own fake handler."""
+ from satpy._config import config_search_paths
+ from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
+ self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
+ # http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
+ self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeShortHDF5FileHandlerAggr,))
+ self.fake_handler = self.p.start()
+ self.p.is_local = True
+
+ def tearDown(self):
+ """Stop wrapping the HDF5 file handler."""
+ self.p.stop()
+
+ def test_load_truncated_band(self):
+ """Test loading a single truncated band."""
+ from satpy.readers import load_reader
+ r = load_reader(self.reader_configs)
+ loadables = r.select_files_from_pathnames([
+ 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5',
+ ])
+ r.create_filehandlers(loadables)
+ ds = r.load(["I01"])
+ self.assertEqual(len(ds), 1)
+ i01_data = ds["I01"].compute()
+ expected_rows = sum(FakeShortHDF5FileHandlerAggr._num_scans_per_gran) * DEFAULT_FILE_SHAPE[0]
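+        # 47 + 48 + 47 scans at 32 I-band rows per scan; the rows padding the
+        # short granules up to 48 scans should have been dropped by the reader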
+ self.assertEqual(i01_data.shape, (expected_rows, 300))
diff --git a/satpy/tests/reader_tests/test_virr_l1b.py b/satpy/tests/reader_tests/test_virr_l1b.py
index 49f829e0b7..a7a76cafb3 100644
--- a/satpy/tests/reader_tests/test_virr_l1b.py
+++ b/satpy/tests/reader_tests/test_virr_l1b.py
@@ -20,11 +20,12 @@
import unittest
from unittest import mock
-from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
-import numpy as np
import dask.array as da
+import numpy as np
import xarray as xr
+from satpy.tests.reader_tests.test_hdf5_utils import FakeHDF5FileHandler
+
class FakeHDF5FileHandler2(FakeHDF5FileHandler):
"""Swap-in HDF5 File Handler."""
@@ -89,8 +90,8 @@ class TestVIRRL1BReader(unittest.TestCase):
def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
- from satpy.readers.virr_l1b import VIRR_L1B
from satpy._config import config_search_paths
+ from satpy.readers.virr_l1b import VIRR_L1B
self.reader_configs = config_search_paths(os.path.join('readers', self.yaml_file))
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(VIRR_L1B, '__bases__', (FakeHDF5FileHandler2,))
@@ -160,6 +161,7 @@ def _fy3_helper(self, platform_name, reader, Emissive_units):
self.assertEqual(('longitude', 'latitude'), attributes['coordinates'])
self.assertEqual(band_values[dataset['name']],
round(float(np.array(ds[ds.shape[0] // 2][ds.shape[1] // 2])), 6))
+ assert "valid_range" not in ds.attrs
def test_fy3b_file(self):
"""Test that FY3B files are recognized."""
diff --git a/satpy/tests/reader_tests/utils.py b/satpy/tests/reader_tests/utils.py
new file mode 100644
index 0000000000..dd5b09c86a
--- /dev/null
+++ b/satpy/tests/reader_tests/utils.py
@@ -0,0 +1,45 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Utilities for reader tests."""
+
+
+def default_attr_processor(root, attr):
+ """Do not change the attribute."""
+ return attr
+
+
+def fill_h5(root, contents, attr_processor=default_attr_processor):
+ """Fill hdf5 file with the given contents.
+
+ Args:
+        root: hdf5 file root
+ contents: Contents to be written into the file
+ attr_processor: A method for modifying attributes before they are
+ written to the file.
+ """
+ for key, val in contents.items():
+ if key in ["value", "attrs"]:
+ continue
+ if "value" in val:
+ root[key] = val["value"]
+ else:
+ grp = root.create_group(key)
+            fill_h5(grp, contents[key], attr_processor=attr_processor)
+ if "attrs" in val:
+ for attr_name, attr_val in val["attrs"].items():
+ root[key].attrs[attr_name] = attr_processor(root, attr_val)
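+
+
+# A minimal usage sketch (assuming h5py): ``contents`` is a nested dict in
+# which each dataset node holds a "value" and, optionally, an "attrs" mapping:
+#
+#     import h5py
+#     contents = {"group": {"data": {"value": [1, 2, 3],
+#                                    "attrs": {"units": "K"}}}}
+#     with h5py.File("fake.h5", "w") as h5f:
+#         fill_h5(h5f, contents)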
diff --git a/satpy/tests/test_cf_roundtrip.py b/satpy/tests/test_cf_roundtrip.py
new file mode 100644
index 0000000000..d90033de54
--- /dev/null
+++ b/satpy/tests/test_cf_roundtrip.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017-2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Test roundripping the cf writer and reader."""
+
+import os
+
+import numpy as np
+
+from satpy import Scene
+from satpy.tests.reader_tests.test_viirs_compact import fake_dnb, fake_dnb_file # noqa
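+# (the fixtures above are imported so pytest can discover them in this module;
+# flake8 would flag the names as unused, hence the noqa markers)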
+
+
+def test_cf_roundtrip(fake_dnb_file, tmp_path): # noqa
+ """Test the cf writing reading cycle."""
+ dnb_filename = os.fspath(fake_dnb_file)
+ write_scn = Scene(filenames=[dnb_filename], reader="viirs_compact")
+ write_scn.load(["DNB"])
+
+ satpy_cf_file = os.fspath(tmp_path / "npp-viirs-20191025061125-20191025061247.nc")
+ write_scn.save_datasets(writer="cf", filename=satpy_cf_file)
+ read_scn = Scene(filenames=[satpy_cf_file], reader="satpy_cf_nc")
+ read_scn.load(["DNB"])
+
+ write_array = write_scn["DNB"]
+ read_array = read_scn["DNB"]
+
+ np.testing.assert_allclose(write_array.values, read_array.values)
diff --git a/satpy/tests/test_compat.py b/satpy/tests/test_compat.py
new file mode 100644
index 0000000000..f084f88e53
--- /dev/null
+++ b/satpy/tests/test_compat.py
@@ -0,0 +1,48 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2022 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Test backports and compatibility fixes."""
+
+import gc
+
+from satpy._compat import CachedPropertyBackport
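+# CachedPropertyBackport mimics functools.cached_property (Python 3.8+): the
+# first access computes the value and caches it in the instance __dict__.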
+
+
+class ClassWithCachedProperty: # noqa
+ def __init__(self, x): # noqa
+ self.x = x
+
+ @CachedPropertyBackport
+ def property(self): # noqa
+ return 2 * self.x
+
+
+def test_cached_property_backport():
+ """Test cached property backport."""
+ c = ClassWithCachedProperty(1)
+ assert c.property == 2
+
+
+def test_cached_property_backport_releases_memory():
+ """Test that cached property backport releases memory."""
+ c1 = ClassWithCachedProperty(2)
+ del c1
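+    # CPython's reference counting should free c1 right away; scanning
+    # gc.get_objects() verifies the cached property held no lingering reference.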
+ instances = [
+ obj for obj in gc.get_objects()
+ if isinstance(obj, ClassWithCachedProperty)
+ ]
+ assert len(instances) == 0
diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py
index 83e9474778..cbbc0951c2 100644
--- a/satpy/tests/test_composites.py
+++ b/satpy/tests/test_composites.py
@@ -27,6 +27,9 @@
import numpy as np
import pytest
import xarray as xr
+from pyresample import AreaDefinition
+
+import satpy
class TestMatchDataArrays(unittest.TestCase):
@@ -51,7 +54,7 @@ def test_single_ds(self):
ds1 = self._get_test_ds()
comp = CompositeBase('test_comp')
ret_datasets = comp.match_data_arrays((ds1,))
- self.assertIs(ret_datasets[0], ds1)
+ assert ret_datasets[0].identical(ds1)
def test_mult_ds_area(self):
"""Test multiple datasets successfully pass."""
@@ -60,8 +63,8 @@ def test_mult_ds_area(self):
ds2 = self._get_test_ds()
comp = CompositeBase('test_comp')
ret_datasets = comp.match_data_arrays((ds1, ds2))
- self.assertIs(ret_datasets[0], ds1)
- self.assertIs(ret_datasets[1], ds2)
+ assert ret_datasets[0].identical(ds1)
+ assert ret_datasets[1].identical(ds2)
def test_mult_ds_no_area(self):
"""Test that all datasets must have an area attribute."""
@@ -74,8 +77,9 @@ def test_mult_ds_no_area(self):
def test_mult_ds_diff_area(self):
"""Test that datasets with different areas fail."""
- from satpy.composites import CompositeBase, IncompatibleAreas
from pyresample.geometry import AreaDefinition
+
+ from satpy.composites import CompositeBase, IncompatibleAreas
ds1 = self._get_test_ds()
ds2 = self._get_test_ds()
ds2.attrs['area'] = AreaDefinition(
@@ -90,18 +94,20 @@ def test_mult_ds_diff_area(self):
def test_mult_ds_diff_dims(self):
"""Test that datasets with different dimensions still pass."""
from satpy.composites import CompositeBase
+
# x is still 50, y is still 100, even though they are in
# different order
ds1 = self._get_test_ds(shape=(50, 100), dims=('y', 'x'))
ds2 = self._get_test_ds(shape=(3, 100, 50), dims=('bands', 'x', 'y'))
comp = CompositeBase('test_comp')
ret_datasets = comp.match_data_arrays((ds1, ds2))
- self.assertIs(ret_datasets[0], ds1)
- self.assertIs(ret_datasets[1], ds2)
+ assert ret_datasets[0].identical(ds1)
+ assert ret_datasets[1].identical(ds2)
def test_mult_ds_diff_size(self):
"""Test that datasets with different sizes fail."""
from satpy.composites import CompositeBase, IncompatibleAreas
+
# x is 50 in this one, 100 in ds2
# y is 100 in this one, 50 in ds2
ds1 = self._get_test_ds(shape=(50, 100), dims=('x', 'y'))
@@ -132,8 +138,12 @@ def setUp(self):
'start_time': datetime(2018, 1, 1, 18),
'modifiers': tuple(),
'resolution': 1000,
+ 'calibration': 'reflectance',
+ 'units': '%',
'name': 'test_vis'}
- ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64),
+ low_res_data = np.ones((2, 2), dtype=np.float64) + 4
+ low_res_data[1, 1] = 0.0 # produces infinite ratio
+ ds1 = xr.DataArray(da.from_array(low_res_data, chunks=2),
attrs=attrs, dims=('y', 'x'),
coords={'y': [0, 1], 'x': [0, 1]})
self.ds1 = ds1
@@ -147,15 +157,19 @@ def setUp(self):
coords={'y': [0, 1], 'x': [0, 1]})
ds3.attrs['name'] += '3'
self.ds3 = ds3
- ds4 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64) + 4,
+
+ # high resolution version
+ high_res_data = np.ones((2, 2), dtype=np.float64)
+ high_res_data[1, 0] = np.nan # invalid value in one band
+ ds4 = xr.DataArray(da.from_array(high_res_data, chunks=2),
attrs=attrs, dims=('y', 'x'),
coords={'y': [0, 1], 'x': [0, 1]})
ds4.attrs['name'] += '4'
ds4.attrs['resolution'] = 500
self.ds4 = ds4
- # high res version
- ds4 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64) + 4,
+ # high resolution version - but too big
+ ds4 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64),
attrs=attrs.copy(), dims=('y', 'x'),
coords={'y': [0, 1, 2, 3], 'x': [0, 1, 2, 3]})
ds4.attrs['name'] += '4'
@@ -172,8 +186,8 @@ def test_bad_color(self):
self.assertRaises(ValueError, RatioSharpenedRGB, name='true_color', high_resolution_band='bad')
def test_match_data_arrays(self):
- """Test that all of the areas have to be the same resolution."""
- from satpy.composites import RatioSharpenedRGB, IncompatibleAreas
+ """Test that all areas have to be the same resolution."""
+ from satpy.composites import IncompatibleAreas, RatioSharpenedRGB
comp = RatioSharpenedRGB(name='true_color')
self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4_big,))
@@ -206,8 +220,8 @@ def test_basic_red(self):
res = res.values
self.assertEqual(res.shape, (3, 2, 2))
np.testing.assert_allclose(res[0], self.ds4.values)
- np.testing.assert_allclose(res[1], np.array([[4.5, 4.5], [4.5, 4.5]], dtype=np.float64))
- np.testing.assert_allclose(res[2], np.array([[6, 6], [6, 6]], dtype=np.float64))
+ np.testing.assert_allclose(res[1], np.array([[0.6, 0.6], [np.nan, 3.0]], dtype=np.float64))
+ np.testing.assert_allclose(res[2], np.array([[0.8, 0.8], [np.nan, 4.0]], dtype=np.float64))
def test_self_sharpened_no_high_res(self):
"""Test for exception when no high res band is specified."""
@@ -223,8 +237,15 @@ def test_self_sharpened_basic(self):
res = res.values
self.assertEqual(res.shape, (3, 2, 2))
np.testing.assert_allclose(res[0], self.ds1.values)
- np.testing.assert_allclose(res[1], np.array([[3, 3], [3, 3]], dtype=np.float64))
- np.testing.assert_allclose(res[2], np.array([[4, 4], [4, 4]], dtype=np.float64))
+ np.testing.assert_allclose(res[1], np.array([[4, 4], [4, 0]], dtype=np.float64))
+ np.testing.assert_allclose(res[2], np.array([[5.333333, 5.333333], [5.333333, 0]], dtype=np.float64))
+
+ def test_no_units(self):
+ """Test that the computed RGB has no units attribute."""
+ from satpy.composites import RatioSharpenedRGB
+ comp = RatioSharpenedRGB(name='true_color')
+ res = comp((self.ds1, self.ds2, self.ds3))
+ assert "units" not in res.attrs
class TestDifferenceCompositor(unittest.TestCase):
@@ -266,8 +287,8 @@ def setUp(self):
def test_basic_diff(self):
"""Test that a basic difference composite works."""
from satpy.composites import DifferenceCompositor
- comp = DifferenceCompositor(name='diff')
- res = comp((self.ds1, self.ds2), standard_name='temperature_difference')
+ comp = DifferenceCompositor(name='diff', standard_name='temperature_difference')
+ res = comp((self.ds1, self.ds2))
np.testing.assert_allclose(res.values, -2)
assert res.attrs.get('standard_name') == 'temperature_difference'
@@ -281,6 +302,39 @@ def test_bad_areas_diff(self):
self.assertRaises(IncompatibleAreas, comp, (self.ds1, self.ds2_big))
+@pytest.fixture
+def fake_area():
+ """Return a fake 2×2 area."""
+ from pyresample.geometry import create_area_def
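+    # 4087 is an EPSG code (WGS 84 / World Equidistant Cylindrical)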
+ return create_area_def("skierffe", 4087, area_extent=[-5_000, -5_000, 5_000, 5_000], shape=(2, 2))
+
+
+@pytest.fixture
+def fake_dataset_pair(fake_area):
+ """Return a fake pair of 2×2 datasets."""
+ ds1 = xr.DataArray(
+ da.full((2, 2), 8, chunks=2, dtype=np.float32), attrs={"area": fake_area})
+ ds2 = xr.DataArray(
+ da.full((2, 2), 4, chunks=2, dtype=np.float32), attrs={"area": fake_area})
+ return (ds1, ds2)
+
+
+def test_ratio_compositor(fake_dataset_pair):
+ """Test the ratio compositor."""
+ from satpy.composites import RatioCompositor
+ comp = RatioCompositor(name="ratio", standard_name="channel_ratio")
+ res = comp(fake_dataset_pair)
+ np.testing.assert_allclose(res.values, 2)
+
+
+def test_sum_compositor(fake_dataset_pair):
+ """Test the sum compositor."""
+ from satpy.composites import SumCompositor
+ comp = SumCompositor(name="sum", standard_name="channel_sum")
+ res = comp(fake_dataset_pair)
+ np.testing.assert_allclose(res.values, 12)
+
+
class TestDayNightCompositor(unittest.TestCase):
"""Test DayNightCompositor."""
@@ -312,35 +366,71 @@ def setUp(self):
self.sza = xr.DataArray(sza, dims=('y', 'x'))
# fake area
- my_area = mock.MagicMock()
- lons = np.array([[-95., -94.], [-93., -92.]])
- lons = da.from_array(lons, lons.shape)
- lats = np.array([[40., 41.], [42., 43.]])
- lats = da.from_array(lats, lats.shape)
- my_area.get_lonlats.return_value = (lons, lats)
+ my_area = AreaDefinition(
+ "test", "", "",
+ "+proj=longlat",
+ 2, 2,
+ (-95.0, 40.0, -92.0, 43.0),
+ )
self.data_a.attrs['area'] = my_area
self.data_b.attrs['area'] = my_area
# not used except to check that it matches the data arrays
self.sza.attrs['area'] = my_area
- def test_basic_sza(self):
- """Test compositor when SZA data is included."""
+ def test_daynight_sza(self):
+ """Test compositor with both day and night portions when SZA data is included."""
from satpy.composites import DayNightCompositor
- comp = DayNightCompositor(name='dn_test')
+ comp = DayNightCompositor(name='dn_test', day_night="day_night")
res = comp((self.data_a, self.data_b, self.sza))
res = res.compute()
expected = np.array([[0., 0.22122352], [0.5, 1.]])
np.testing.assert_allclose(res.values[0], expected)
- def test_basic_area(self):
- """Test compositor when SZA data is not provided."""
+ def test_daynight_area(self):
+ """Test compositor both day and night portions when SZA data is not provided."""
from satpy.composites import DayNightCompositor
- comp = DayNightCompositor(name='dn_test')
+ comp = DayNightCompositor(name='dn_test', day_night="day_night")
res = comp((self.data_a, self.data_b))
res = res.compute()
expected = np.array([[0., 0.33164983], [0.66835017, 1.]])
np.testing.assert_allclose(res.values[0], expected)
+ def test_night_only_sza(self):
+ """Test compositor with night portion when SZA data is included."""
+ from satpy.composites import DayNightCompositor
+ comp = DayNightCompositor(name='dn_test', day_night="night_only")
+ res = comp((self.data_b, self.sza))
+ res = res.compute()
+ expected = np.array([[np.nan, 0.], [0.5, 1.]])
+ np.testing.assert_allclose(res.values[0], expected)
+
+ def test_night_only_area(self):
+ """Test compositor with night portion when SZA data is not provided."""
+ from satpy.composites import DayNightCompositor
+ comp = DayNightCompositor(name='dn_test', day_night="night_only")
+        res = comp((self.data_b,))
+ res = res.compute()
+ expected = np.array([[np.nan, 0.], [0., 0.]])
+ np.testing.assert_allclose(res.values[0], expected)
+
+ def test_day_only_sza(self):
+ """Test compositor with day portion when SZA data is included."""
+ from satpy.composites import DayNightCompositor
+ comp = DayNightCompositor(name='dn_test', day_night="day_only")
+ res = comp((self.data_a, self.sza))
+ res = res.compute()
+ expected = np.array([[0., 0.22122352], [0., 0.]])
+ np.testing.assert_allclose(res.values[0], expected)
+
+ def test_day_only_area(self):
+ """Test compositor with day portion when SZA data is not provided."""
+ from satpy.composites import DayNightCompositor
+ comp = DayNightCompositor(name='dn_test', day_night="day_only")
+        res = comp((self.data_a,))
+ res = res.compute()
+ expected = np.array([[0., 0.33164983], [0.66835017, 1.]])
+ np.testing.assert_allclose(res.values[0], expected)
+
class TestFillingCompositor(unittest.TestCase):
"""Test case for the filling compositor."""
@@ -366,14 +456,27 @@ def test_fill(self):
"""Test filling."""
from satpy.composites import MultiFiller
comp = MultiFiller(name='fill_test')
- a = xr.DataArray(np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]))
- b = xr.DataArray(np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan]))
- c = xr.DataArray(np.array([np.nan, 22, 3, np.nan, np.nan, np.nan, 7]))
- d = xr.DataArray(np.array([np.nan, np.nan, np.nan, np.nan, np.nan, 6, np.nan]))
- e = xr.DataArray(np.array([np.nan, np.nan, np.nan, np.nan, 5, np.nan, np.nan]))
+ attrs = {"units": "K"}
+ a = xr.DataArray(
+ np.array([1, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]),
+ attrs=attrs.copy())
+ b = xr.DataArray(
+ np.array([np.nan, 2, 3, np.nan, np.nan, np.nan, np.nan]),
+ attrs=attrs.copy())
+ c = xr.DataArray(
+ np.array([np.nan, 22, 3, np.nan, np.nan, np.nan, 7]),
+ attrs=attrs.copy())
+ d = xr.DataArray(
+ np.array([np.nan, np.nan, np.nan, np.nan, np.nan, 6, np.nan]),
+ attrs=attrs.copy())
+ e = xr.DataArray(
+ np.array([np.nan, np.nan, np.nan, np.nan, 5, np.nan, np.nan]),
+ attrs=attrs.copy())
expected = xr.DataArray(np.array([1, 2, 3, np.nan, 5, 6, 7]))
res = comp([a, b, c], optional_datasets=[d, e])
np.testing.assert_allclose(res.data, expected.data)
+ assert "units" in res.attrs
+ assert res.attrs["units"] == "K"
class TestLuminanceSharpeningCompositor(unittest.TestCase):
@@ -409,16 +512,25 @@ def test_compositor(self):
np.testing.assert_allclose(res.data, 0.0, atol=1e-9)
-class TestSandwichCompositor(unittest.TestCase):
+class TestSandwichCompositor:
"""Test sandwich compositor."""
+ # Test RGB and RGBA
+ @pytest.mark.parametrize(
+ "input_shape,bands",
+ [
+ ((3, 2, 2), ['R', 'G', 'B']),
+ ((4, 2, 2), ['R', 'G', 'B', 'A'])
+ ]
+ )
@mock.patch('satpy.composites.enhance2dataset')
- def test_compositor(self, e2d):
+ def test_compositor(self, e2d, input_shape, bands):
"""Test luminance sharpening compositor."""
from satpy.composites import SandwichCompositor
- rgb_arr = da.from_array(np.random.random((3, 2, 2)), chunks=2)
- rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x'])
+ rgb_arr = da.from_array(np.random.random(input_shape), chunks=2)
+ rgb = xr.DataArray(rgb_arr, dims=['bands', 'y', 'x'],
+ coords={'bands': bands})
lum_arr = da.from_array(100 * np.random.random((2, 2)), chunks=2)
lum = xr.DataArray(lum_arr, dims=['y', 'x'])
@@ -428,9 +540,17 @@ def test_compositor(self, e2d):
res = comp([lum, rgb])
- for i in range(3):
- np.testing.assert_allclose(res.data[i, :, :],
- rgb_arr[i, :, :] * lum_arr / 100.)
+ for band in rgb:
+ if band.bands != 'A':
+ # Check compositor has modified this band
+ np.testing.assert_allclose(res.loc[band.bands].to_numpy(),
+ band.to_numpy() * lum_arr / 100.)
+ else:
+ # Check Alpha band remains intact
+ np.testing.assert_allclose(res.loc[band.bands].to_numpy(),
+ band.to_numpy())
+ # make sure the compositor doesn't modify the input data
+ np.testing.assert_allclose(lum.values, lum_arr.compute())
class TestInlineComposites(unittest.TestCase):
@@ -438,10 +558,8 @@ class TestInlineComposites(unittest.TestCase):
def test_inline_composites(self):
"""Test that inline composites are working."""
- from satpy.composites.config_loader import CompositorLoader
- cl_ = CompositorLoader()
- cl_.load_sensor_composites('visir')
- comps = cl_.compositors
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+ comps = load_compositor_configs_for_sensors(['visir'])[0]
# Check that "fog" product has all its prerequisites defined
keys = comps['visir'].keys()
fog = [comps['visir'][dsid] for dsid in keys if "fog" == dsid['name']][0]
@@ -460,9 +578,7 @@ def test_inline_composites(self):
# Check the same for SEVIRI and verify channel names are used
# in the sub-composite dependencies instead of wavelengths
- cl_ = CompositorLoader()
- cl_.load_sensor_composites('seviri')
- comps = cl_.compositors
+ comps = load_compositor_configs_for_sensors(['seviri'])[0]
keys = comps['seviri'].keys()
fog_dep_ids = [dsid for dsid in keys if "fog_dep" in dsid['name']]
self.assertEqual(comps['seviri'][fog_dep_ids[0]].attrs['prerequisites'],
@@ -702,6 +818,38 @@ def test_call(self):
self.assertEqual(res.attrs['resolution'], 333)
+class TestCategoricalDataCompositor(unittest.TestCase):
+ """Test composiotor for recategorization of categorical data."""
+
+ def setUp(self):
+ """Create test data."""
+ attrs = {'name': 'foo'}
+ data = xr.DataArray(da.from_array([[2., 1.], [3., 0.]]), attrs=attrs,
+ dims=('y', 'x'), coords={'y': [0, 1], 'x': [0, 1]})
+
+ self.data = data
+
+ def test_basic_recategorization(self):
+ """Test general functionality of compositor incl. attributes."""
+ from satpy.composites import CategoricalDataCompositor
+ lut = [np.nan, 0, 1, 1]
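+        # the lut maps each category value (used as an index) to a new value:
+        # 0 -> NaN, 1 -> 0, 2 -> 1, 3 -> 1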
+ name = 'bar'
+ comp = CategoricalDataCompositor(name=name, lut=lut)
+ res = comp([self.data])
+ res = res.compute()
+ expected = np.array([[1., 0.], [1., np.nan]])
+ np.testing.assert_equal(res.values, expected)
+ np.testing.assert_equal(res.attrs['name'], name)
+ np.testing.assert_equal(res.attrs['composite_lut'], lut)
+
+ def test_too_many_datasets(self):
+ """Test that ValueError is raised if more than one dataset is provided."""
+ from satpy.composites import CategoricalDataCompositor
+ lut = [np.nan, 0, 1, 1]
+ comp = CategoricalDataCompositor(name='foo', lut=lut)
+ np.testing.assert_raises(ValueError, comp, [self.data, self.data])
+
+
class TestGenericCompositor(unittest.TestCase):
"""Test generic compositor."""
@@ -931,7 +1079,6 @@ def test_init(self, get_area_def):
@mock.patch('satpy.Scene')
def test_call(self, Scene, register, retrieve): # noqa
"""Test the static compositing."""
- import satpy
from satpy.composites import StaticImageCompositor
satpy.config.set(data_dir=os.path.join(os.path.sep, 'path', 'to', 'image'))
@@ -1008,126 +1155,159 @@ def _enhance2dataset(dataset, convert_p=False):
return dataset
-class TestBackgroundCompositor(unittest.TestCase):
+class TestBackgroundCompositor:
"""Test case for the background compositor."""
+ @classmethod
+ def setup_class(cls):
+ """Create shared input data arrays."""
+ foreground_data = {
+ "L": np.array([[[1., 0.5], [0., np.nan]]]),
+ "LA": np.array([[[1., 0.5], [0., np.nan]], [[0.5, 0.5], [0.5, 0.5]]]),
+ "RGB": np.array([
+ [[1., 0.5], [0., np.nan]],
+ [[1., 0.5], [0., np.nan]],
+ [[1., 0.5], [0., np.nan]]]),
+ "RGBA": np.array([
+ [[1.0, 0.5], [0.0, np.nan]],
+ [[1.0, 0.5], [0.0, np.nan]],
+ [[1.0, 0.5], [0.0, np.nan]],
+ [[0.5, 0.5], [0.5, 0.5]]]),
+ }
+ cls.foreground_data = foreground_data
+
@mock.patch('satpy.composites.enhance2dataset', _enhance2dataset)
- def test_call(self):
+ @pytest.mark.parametrize(
+ ('foreground_bands', 'background_bands', 'exp_bands', 'exp_result'),
+ [
+ ('L', 'L', 'L', np.array([[1.0, 0.5], [0.0, 1.0]])),
+ ('LA', 'LA', 'L', np.array([[1.0, 0.75], [0.5, 1.0]])),
+ ('RGB', 'RGB', 'RGB', np.array([
+ [[1., 0.5], [0., 1.]],
+ [[1., 0.5], [0., 1.]],
+ [[1., 0.5], [0., 1.]]])),
+ ('RGBA', 'RGBA', 'RGB', np.array([
+ [[1., 0.75], [0.5, 1.]],
+ [[1., 0.75], [0.5, 1.]],
+ [[1., 0.75], [0.5, 1.]]])),
+ ('RGBA', 'RGB', 'RGB', np.array([
+ [[1., 0.75], [0.5, 1.]],
+ [[1., 0.75], [0.5, 1.]],
+ [[1., 0.75], [0.5, 1.]]])),
+ ]
+ )
+ def test_call(self, foreground_bands, background_bands, exp_bands, exp_result):
"""Test the background compositing."""
from satpy.composites import BackgroundCompositor
- import numpy as np
comp = BackgroundCompositor("name")
# L mode images
- attrs = {'mode': 'L', 'area': 'foo'}
- foreground = xr.DataArray(np.array([[[1., 0.5],
- [0., np.nan]]]),
- dims=('bands', 'y', 'x'),
- coords={'bands': [c for c in attrs['mode']]},
- attrs=attrs)
- background = xr.DataArray(np.ones((1, 2, 2)), dims=('bands', 'y', 'x'),
- coords={'bands': [c for c in attrs['mode']]},
- attrs=attrs)
- res = comp([foreground, background])
- self.assertEqual(res.attrs['area'], 'foo')
- self.assertTrue(np.all(res == np.array([[1., 0.5], [0., 1.]])))
- self.assertEqual(res.attrs['mode'], 'L')
-
- # LA mode images
- attrs = {'mode': 'LA', 'area': 'foo'}
- foreground = xr.DataArray(np.array([[[1., 0.5],
- [0., np.nan]],
- [[0.5, 0.5],
- [0.5, 0.5]]]),
+ foreground_data = self.foreground_data[foreground_bands]
+ attrs = {'mode': foreground_bands, 'area': 'foo'}
+ foreground = xr.DataArray(da.from_array(foreground_data),
dims=('bands', 'y', 'x'),
coords={'bands': [c for c in attrs['mode']]},
attrs=attrs)
- background = xr.DataArray(np.ones((2, 2, 2)), dims=('bands', 'y', 'x'),
+ attrs = {'mode': background_bands, 'area': 'foo'}
+ background = xr.DataArray(da.ones((len(background_bands), 2, 2)), dims=('bands', 'y', 'x'),
coords={'bands': [c for c in attrs['mode']]},
attrs=attrs)
res = comp([foreground, background])
- self.assertTrue(np.all(res == np.array([[1., 0.75], [0.5, 1.]])))
- self.assertEqual(res.attrs['mode'], 'LA')
-
- # RGB mode images
- attrs = {'mode': 'RGB', 'area': 'foo'}
- foreground = xr.DataArray(np.array([[[1., 0.5],
- [0., np.nan]],
- [[1., 0.5],
- [0., np.nan]],
- [[1., 0.5],
- [0., np.nan]]]),
- dims=('bands', 'y', 'x'),
- coords={'bands': [c for c in attrs['mode']]},
- attrs=attrs)
- background = xr.DataArray(np.ones((3, 2, 2)), dims=('bands', 'y', 'x'),
- coords={'bands': [c for c in attrs['mode']]},
- attrs=attrs)
-
- res = comp([foreground, background])
- self.assertTrue(np.all(res == np.array([[[1., 0.5], [0., 1.]],
- [[1., 0.5], [0., 1.]],
- [[1., 0.5], [0., 1.]]])))
- self.assertEqual(res.attrs['mode'], 'RGB')
-
- # RGBA mode images
- attrs = {'mode': 'RGBA', 'area': 'foo'}
- foreground = xr.DataArray(np.array([[[1., 0.5],
- [0., np.nan]],
- [[1., 0.5],
- [0., np.nan]],
- [[1., 0.5],
- [0., np.nan]],
- [[0.5, 0.5],
- [0.5, 0.5]]]),
- dims=('bands', 'y', 'x'),
- coords={'bands': [c for c in attrs['mode']]},
- attrs=attrs)
- background = xr.DataArray(np.ones((4, 2, 2)), dims=('bands', 'y', 'x'),
- coords={'bands': [c for c in attrs['mode']]},
- attrs=attrs)
-
- res = comp([foreground, background])
- self.assertTrue(np.all(res == np.array([[[1., 0.75], [0.5, 1.]],
- [[1., 0.75], [0.5, 1.]],
- [[1., 0.75], [0.5, 1.]]])))
- self.assertEqual(res.attrs['mode'], 'RGBA')
+ assert res.attrs['area'] == 'foo'
+ np.testing.assert_allclose(res, exp_result)
+        assert res.attrs['mode'] == exp_bands

     @mock.patch('satpy.composites.enhance2dataset', _enhance2dataset)
def test_multiple_sensors(self):
"""Test the background compositing from multiple sensor data."""
from satpy.composites import BackgroundCompositor
- import numpy as np
comp = BackgroundCompositor("name")
# L mode images
attrs = {'mode': 'L', 'area': 'foo'}
- foreground = xr.DataArray(np.array([[[1., 0.5],
- [0., np.nan]]]),
+ foreground_data = self.foreground_data["L"]
+ foreground = xr.DataArray(da.from_array(foreground_data),
dims=('bands', 'y', 'x'),
coords={'bands': [c for c in attrs['mode']]},
attrs=attrs.copy())
foreground.attrs['sensor'] = 'abi'
- background = xr.DataArray(np.ones((1, 2, 2)), dims=('bands', 'y', 'x'),
+ background = xr.DataArray(da.ones((1, 2, 2)), dims=('bands', 'y', 'x'),
coords={'bands': [c for c in attrs['mode']]},
attrs=attrs.copy())
background.attrs['sensor'] = 'glm'
res = comp([foreground, background])
- self.assertEqual(res.attrs['area'], 'foo')
- self.assertTrue(np.all(res == np.array([[1., 0.5], [0., 1.]])))
- self.assertEqual(res.attrs['mode'], 'L')
- self.assertEqual(res.attrs['sensor'], {'abi', 'glm'})
+ assert res.attrs['area'] == 'foo'
+ np.testing.assert_allclose(res, np.array([[1., 0.5], [0., 1.]]))
+ assert res.attrs['mode'] == 'L'
+ assert res.attrs['sensor'] == {'abi', 'glm'}
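
# The parametrization above collapses what used to be four near-identical
# L/LA/RGB/RGBA blocks into one test body. A minimal, self-contained sketch of
# the same pytest pattern (illustrative names only, not part of this patch):
import numpy as np
import pytest


@pytest.mark.parametrize(
    ('mode', 'exp_shape'),
    [
        ('L', (2, 2)),       # single band squeezes to 2D
        ('RGB', (3, 2, 2)),  # three bands stay 3D
    ]
)
def test_band_shapes(mode, exp_shape):
    # The body runs once per parameter tuple, replacing copy-pasted blocks.
    data = np.ones((len(mode), 2, 2)).squeeze()
    assert data.shape == exp_shape

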
-class TestMaskingCompositor(unittest.TestCase):
+class TestMaskingCompositor:
     """Test case for the simple masking compositor."""

+    @pytest.fixture
+ def conditions_v1(self):
+ """Masking conditions with string values."""
+ return [{'method': 'equal',
+ 'value': 'Cloud-free_land',
+ 'transparency': 100},
+ {'method': 'equal',
+ 'value': 'Cloud-free_sea',
+ 'transparency': 50}]
+
+ @pytest.fixture
+ def conditions_v2(self):
+ """Masking conditions with numerical values."""
+ return [{'method': 'equal',
+ 'value': 1,
+ 'transparency': 100},
+ {'method': 'equal',
+ 'value': 2,
+ 'transparency': 50}]
+
+ @pytest.fixture
+ def test_data(self):
+ """Test data to use with masking compositors."""
+ return xr.DataArray(da.random.random((3, 3)), dims=['y', 'x'])
+
+ @pytest.fixture
+ def test_ct_data(self):
+ """Test 2D CT data array."""
+ flag_meanings = ['Cloud-free_land', 'Cloud-free_sea']
+ flag_values = da.array([1, 2])
+ ct_data = da.array([[1, 2, 2],
+ [2, 1, 2],
+ [2, 2, 1]])
+ ct_data = xr.DataArray(ct_data, dims=['y', 'x'])
+ ct_data.attrs['flag_meanings'] = flag_meanings
+ ct_data.attrs['flag_values'] = flag_values
+ return ct_data
+
+ @pytest.fixture
+ def test_ct_data_v3(self, test_ct_data):
+        """Set ct data to NaN everywhere except where it originally is 1."""
+ return test_ct_data.where(test_ct_data == 1)
+
+ @pytest.fixture
+ def reference_data(self, test_data, test_ct_data):
+ """Get reference data to use in masking compositor tests."""
+ # The data are set to NaN where ct is `1`
+ return test_data.where(test_ct_data > 1)
+
+ @pytest.fixture
+ def reference_alpha(self):
+ """Get reference alpha to use in masking compositor tests."""
+ ref_alpha = da.array([[0, 0.5, 0.5],
+ [0.5, 0, 0.5],
+ [0.5, 0.5, 0]])
+ return xr.DataArray(ref_alpha, dims=['y', 'x'])
+
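
# The fixtures above compose: `reference_data` receives `test_data` and
# `test_ct_data` simply by naming them as arguments, so derived reference
# arrays stay in sync with their inputs. A stripped-down sketch of that
# dependency mechanism (illustrative names, not part of this patch):
import pytest


@pytest.fixture
def base_values():
    return [1, 2, 2]


@pytest.fixture
def masked_values(base_values):
    # pytest resolves `base_values` first and injects its return value here.
    return [v if v > 1 else None for v in base_values]


def test_masked_values(masked_values):
    assert masked_values == [None, 2, 2]
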
def test_init(self):
"""Test the initializiation of compositor."""
from satpy.composites import MaskingCompositor
# No transparency or conditions given raises ValueError
- with self.assertRaises(ValueError):
+ with pytest.raises(ValueError):
comp = MaskingCompositor("name")
# transparency defined
@@ -1164,102 +1344,94 @@ def test_get_flag_value(self):
assert _get_flag_value(mask, 'Cloud-free_land') == 1
assert _get_flag_value(mask, 'Cloud-free_sea') == 2
- def test_call(self):
- """Test call the compositor."""
+ @pytest.mark.parametrize("mode", ["LA", "RGBA"])
+ def test_call_numerical_transparency_data(
+ self, conditions_v1, test_data, test_ct_data, reference_data,
+ reference_alpha, mode):
+ """Test call the compositor with numerical transparency data.
+
+ Use parameterisation to test different image modes.
+ """
from satpy.composites import MaskingCompositor
from satpy.tests.utils import CustomScheduler
- flag_meanings = ['Cloud-free_land', 'Cloud-free_sea']
- flag_meanings_str = 'Cloud-free_land Cloud-free_sea'
- flag_values = da.array([1, 2])
- conditions_v1 = [{'method': 'equal',
- 'value': 'Cloud-free_land',
- 'transparency': 100},
- {'method': 'equal',
- 'value': 'Cloud-free_sea',
- 'transparency': 50}]
- conditions_v2 = [{'method': 'equal',
- 'value': 1,
- 'transparency': 100},
- {'method': 'equal',
- 'value': 2,
- 'transparency': 50}]
- conditions_v3 = [{'method': 'isnan',
- 'transparency': 100}]
- conditions_v4 = [{'method': 'absolute_import',
- 'transparency': 'satpy.resample'}]
-
- # 2D data array
- data = xr.DataArray(da.random.random((3, 3)), dims=['y', 'x'])
-
- # 2D CT data array
- ct_data = da.array([[1, 2, 2],
- [2, 1, 2],
- [2, 2, 1]])
- ct_data = xr.DataArray(ct_data, dims=['y', 'x'])
- ct_data.attrs['flag_meanings'] = flag_meanings
- ct_data.attrs['flag_values'] = flag_values
-
- reference_alpha = da.array([[0, 0.5, 0.5],
- [0.5, 0, 0.5],
- [0.5, 0.5, 0]])
- reference_alpha = xr.DataArray(reference_alpha, dims=['y', 'x'])
- # The data are set to NaN where ct is `1`
- reference_data = data.where(ct_data > 1)
-
- reference_alpha_v3 = da.array([[1., 0., 0.],
- [0., 1., 0.],
- [0., 0., 1.]])
- reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x'])
- # The data are set to NaN where ct is NaN
- reference_data_v3 = data.where(ct_data == 1)
-
# Test with numerical transparency data
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
- comp = MaskingCompositor("name", conditions=conditions_v1)
- res = comp([data, ct_data])
- self.assertEqual(res.mode, 'LA')
- np.testing.assert_allclose(res.sel(bands='L'), reference_data)
+ comp = MaskingCompositor("name", conditions=conditions_v1,
+ mode=mode)
+ res = comp([test_data, test_ct_data])
+ assert res.mode == mode
+ for m in mode.rstrip("A"):
+ np.testing.assert_allclose(res.sel(bands=m), reference_data)
         np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)

-        # Test with named fields
+    def test_call_named_fields(self, conditions_v2, test_data, test_ct_data,
+ reference_data, reference_alpha):
+ """Test with named fields."""
+ from satpy.composites import MaskingCompositor
+ from satpy.tests.utils import CustomScheduler
+
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
comp = MaskingCompositor("name", conditions=conditions_v2)
- res = comp([data, ct_data])
- self.assertEqual(res.mode, 'LA')
+ res = comp([test_data, test_ct_data])
+ assert res.mode == "LA"
np.testing.assert_allclose(res.sel(bands='L'), reference_data)
np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)
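
# `CustomScheduler(max_computes=0)` asserts laziness: building the masked
# composite must not trigger any dask computation. A rough sketch of such a
# compute-counting scheduler (an assumption about its shape, not satpy's
# literal implementation in satpy.tests.utils):
import dask


class CountingScheduler:
    """Raise if dask computes more often than allowed."""

    def __init__(self, max_computes=0):
        self.max_computes = max_computes
        self.total_computes = 0

    def __call__(self, dsk, keys, **kwargs):
        # dask calls the configured scheduler on every compute().
        self.total_computes += 1
        if self.total_computes > self.max_computes:
            raise RuntimeError("Too many computations: "
                               f"{self.total_computes} > {self.max_computes}")
        return dask.get(dsk, keys, **kwargs)


# Usage mirrors the tests: with dask.config.set(scheduler=CountingScheduler(0)): ...
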
- # Test with named fields which are as a string in the mask attributes
- ct_data.attrs['flag_meanings'] = flag_meanings_str
+ def test_call_named_fields_string(
+ self, conditions_v2, test_data, test_ct_data, reference_data,
+ reference_alpha):
+ """Test with named fields which are as a string in the mask attributes."""
+ from satpy.composites import MaskingCompositor
+ from satpy.tests.utils import CustomScheduler
+
+ flag_meanings_str = 'Cloud-free_land Cloud-free_sea'
+ test_ct_data.attrs['flag_meanings'] = flag_meanings_str
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
comp = MaskingCompositor("name", conditions=conditions_v2)
- res = comp([data, ct_data])
- self.assertEqual(res.mode, 'LA')
+ res = comp([test_data, test_ct_data])
+ assert res.mode == "LA"
np.testing.assert_allclose(res.sel(bands='L'), reference_data)
         np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)

-        # Test "isnan" as method
-        # Set ct data to NaN where it originally is 1
-        ct_data_v3 = ct_data.where(ct_data == 1)
+    def test_method_isnan(self, test_data,
+ test_ct_data, test_ct_data_v3):
+ """Test "isnan" as method."""
+ from satpy.composites import MaskingCompositor
+ from satpy.tests.utils import CustomScheduler
+
+ conditions_v3 = [{'method': 'isnan', 'transparency': 100}]
+
+ # The data are set to NaN where ct is NaN
+ reference_data_v3 = test_data.where(test_ct_data == 1)
+ reference_alpha_v3 = da.array([[1., 0., 0.],
+ [0., 1., 0.],
+ [0., 0., 1.]])
+ reference_alpha_v3 = xr.DataArray(reference_alpha_v3, dims=['y', 'x'])
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
comp = MaskingCompositor("name", conditions=conditions_v3)
- res = comp([data, ct_data_v3])
- self.assertEqual(res.mode, 'LA')
+ res = comp([test_data, test_ct_data_v3])
+ assert res.mode == "LA"
np.testing.assert_allclose(res.sel(bands='L'), reference_data_v3)
         np.testing.assert_allclose(res.sel(bands='A'), reference_alpha_v3)

-        # Test "absolute_import" as method
+    def test_method_absolute_import(self, test_data, test_ct_data_v3):
+ """Test "absolute_import" as method."""
+ from satpy.composites import MaskingCompositor
+ from satpy.tests.utils import CustomScheduler
+
+ conditions_v4 = [{'method': 'absolute_import', 'transparency': 'satpy.resample'}]
# This should raise AttributeError
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
comp = MaskingCompositor("name", conditions=conditions_v4)
- try:
- res = comp([data, ct_data_v3])
- raise ValueError("Tried to use 'np.absolute_import'")
- except AttributeError:
- pass
+ with pytest.raises(AttributeError):
+ comp([test_data, test_ct_data_v3])
+
+ def test_rgb_dataset(self, conditions_v1, test_ct_data, reference_alpha):
+ """Test RGB dataset."""
+ from satpy.composites import MaskingCompositor
+        from satpy.tests.utils import CustomScheduler
+
-        # Test RGB dataset
         # 3D data array
data = xr.DataArray(da.random.random((3, 3, 3)),
dims=['bands', 'y', 'x'],
@@ -1269,17 +1441,20 @@ def test_call(self):
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
comp = MaskingCompositor("name", conditions=conditions_v1)
- res = comp([data, ct_data])
- self.assertEqual(res.mode, 'RGBA')
+ res = comp([data, test_ct_data])
+ assert res.mode == "RGBA"
np.testing.assert_allclose(res.sel(bands='R'),
- data.sel(bands='R').where(ct_data > 1))
+ data.sel(bands='R').where(test_ct_data > 1))
np.testing.assert_allclose(res.sel(bands='G'),
- data.sel(bands='G').where(ct_data > 1))
+ data.sel(bands='G').where(test_ct_data > 1))
np.testing.assert_allclose(res.sel(bands='B'),
- data.sel(bands='B').where(ct_data > 1))
+ data.sel(bands='B').where(test_ct_data > 1))
         np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)

-        # Test RGBA dataset
+    def test_rgba_dataset(self, conditions_v2, test_ct_data, reference_alpha):
+ """Test RGBA dataset."""
+ from satpy.composites import MaskingCompositor
+        from satpy.tests.utils import CustomScheduler
+
         data = xr.DataArray(da.random.random((4, 3, 3)),
dims=['bands', 'y', 'x'],
coords={'bands': ['R', 'G', 'B', 'A'],
@@ -1288,26 +1463,36 @@ def test_call(self):
with dask.config.set(scheduler=CustomScheduler(max_computes=0)):
comp = MaskingCompositor("name", conditions=conditions_v2)
- res = comp([data, ct_data])
- self.assertEqual(res.mode, 'RGBA')
+ res = comp([data, test_ct_data])
+ assert res.mode == "RGBA"
np.testing.assert_allclose(res.sel(bands='R'),
- data.sel(bands='R').where(ct_data > 1))
+ data.sel(bands='R').where(test_ct_data > 1))
np.testing.assert_allclose(res.sel(bands='G'),
- data.sel(bands='G').where(ct_data > 1))
+ data.sel(bands='G').where(test_ct_data > 1))
np.testing.assert_allclose(res.sel(bands='B'),
- data.sel(bands='B').where(ct_data > 1))
+ data.sel(bands='B').where(test_ct_data > 1))
# The compositor should drop the original alpha band
         np.testing.assert_allclose(res.sel(bands='A'), reference_alpha)

-        # incorrect method
+    def test_incorrect_method(self, test_data, test_ct_data):
+ """Test incorrect method."""
+ from satpy.composites import MaskingCompositor
conditions = [{'method': 'foo', 'value': 0, 'transparency': 100}]
comp = MaskingCompositor("name", conditions=conditions)
- with self.assertRaises(AttributeError):
- res = comp([data, ct_data])
+ with pytest.raises(AttributeError):
+ comp([test_data, test_ct_data])
+ # Test with too few projectables.
+ with pytest.raises(ValueError):
+ comp([test_data])
+
+ def test_incorrect_mode(self, conditions_v1):
+ """Test initiating with unsupported mode."""
+ from satpy.composites import MaskingCompositor
- # too few projectables
- with self.assertRaises(ValueError):
- res = comp([data])
+ # Incorrect mode raises ValueError
+ with pytest.raises(ValueError):
+ MaskingCompositor("name", conditions=conditions_v1,
+                              mode="YCbCrA")


 class TestNaturalEnhCompositor(unittest.TestCase):
@@ -1447,12 +1632,15 @@ def test_masking(self):
area = mock.MagicMock()
lons = np.array([-180., -100., -50., 0., 50., 100., 180.])
area.get_lonlats = mock.MagicMock(return_value=[lons, []])
- a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]), attrs={'area': area})
+ a = xr.DataArray(np.array([1, 2, 3, 4, 5, 6, 7]),
+ attrs={'area': area, 'units': 'K'})
comp = LongitudeMaskingCompositor(name='test', lon_min=-40., lon_max=120.)
expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, np.nan]))
res = comp([a])
np.testing.assert_allclose(res.data, expected.data)
+ assert "units" in res.attrs
+ assert res.attrs["units"] == "K"
comp = LongitudeMaskingCompositor(name='test', lon_min=-40.)
expected = xr.DataArray(np.array([np.nan, np.nan, np.nan, 4, 5, 6, 7]))
@@ -1468,3 +1656,41 @@ def test_masking(self):
expected = xr.DataArray(np.array([1, 2, 3, np.nan, np.nan, np.nan, 7]))
res = comp([a])
np.testing.assert_allclose(res.data, expected.data)
+
+
+def test_bad_sensor_yaml_configs(tmp_path):
+    """Test that a composite YAML file without a sensor name isn't loaded.
+
+ But the bad YAML also shouldn't crash composite configuration loading.
+
+ """
+ from satpy.composites.config_loader import load_compositor_configs_for_sensors
+
+ comp_dir = tmp_path / "composites"
+ comp_dir.mkdir()
+ comp_yaml = comp_dir / "fake_sensor.yaml"
+ with satpy.config.set(config_path=[tmp_path]):
+ _create_fake_composite_config(comp_yaml)
+
+ # no sensor_name in YAML, quietly ignored
+ comps, _ = load_compositor_configs_for_sensors(["fake_sensor"])
+ assert "fake_sensor" in comps
+ assert "fake_composite" not in comps["fake_sensor"]
+
+
+def _create_fake_composite_config(yaml_filename: str):
+ import yaml
+
+ from satpy.composites import StaticImageCompositor
+
+ with open(yaml_filename, "w") as comp_file:
+ yaml.dump({
+ "composites": {
+ "fake_composite": {
+ "compositor": StaticImageCompositor,
+ "url": "http://example.com/image.png",
+ },
+ },
+ },
+ comp_file,
+ )
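
# The helper above dumps a compositor *class* with PyYAML's default (unsafe)
# Dumper, which serializes Python objects as `!!python/name:` tags, the same
# notation the hand-written plugin YAMLs in test_config.py use. A small
# illustration (FakeCompositor is a placeholder class):
import yaml


class FakeCompositor:
    pass


text = yaml.dump({"compositor": FakeCompositor})
# `text` now reads something like:
#   compositor: !!python/name:__main__.FakeCompositor ''
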
diff --git a/satpy/tests/test_config.py b/satpy/tests/test_config.py
index 8f38efe295..21d0aa0845 100644
--- a/satpy/tests/test_config.py
+++ b/satpy/tests/test_config.py
@@ -16,24 +16,36 @@
# You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Test objects and functions in the satpy.config module."""
+from __future__ import annotations
+import contextlib
import os
+import sys
import unittest
+from importlib.metadata import EntryPoint
+from pathlib import Path
+from typing import Callable, Iterator
from unittest import mock
+
import pytest
+import satpy
+from satpy import DatasetDict
+from satpy.composites.config_loader import load_compositor_configs_for_sensors
+
class TestBuiltinAreas(unittest.TestCase):
"""Test that the builtin areas are all valid."""
def test_areas_pyproj(self):
"""Test all areas have valid projections with pyproj."""
+ import numpy as np
import pyproj
+ import xarray as xr
from pyresample import parse_area_file
from pyresample.geometry import SwathDefinition
+
from satpy.resample import get_area_file
- import numpy as np
- import xarray as xr
lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]])
lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]])
@@ -61,11 +73,12 @@ def test_areas_rasterio(self):
if not hasattr(CRS, 'from_dict'):
return unittest.skip("RasterIO 1.0+ required")
+ import numpy as np
+ import xarray as xr
from pyresample import parse_area_file
from pyresample.geometry import SwathDefinition
+
from satpy.resample import get_area_file
- import numpy as np
- import xarray as xr
lons = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]])
lats = np.array([[0, 0.1, 0.2], [0.05, 0.15, 0.25]])
@@ -91,24 +104,280 @@ def test_areas_rasterio(self):
_ = CRS.from_dict(proj_dict)
-class TestPluginsConfigs(unittest.TestCase):
+@contextlib.contextmanager
+def fake_plugin_etc_path(
+ tmp_path: Path,
+ entry_point_names: dict[str, list[str]],
+) -> Iterator[Path]:
+ """Create a fake satpy plugin entry point.
+
+ This mocks the necessary methods to trick Satpy into thinking a plugin
+ package is installed and has made a satpy plugin available.
+
+ """
+ etc_path, entry_points, module_paths = _get_entry_points_and_etc_paths(tmp_path, entry_point_names)
+ fake_iter_entry_points = _create_fake_iter_entry_points(entry_points)
+ fake_importlib_files = _create_fake_importlib_files(module_paths)
+ with mock.patch('satpy._config.entry_points', fake_iter_entry_points), \
+ mock.patch('satpy._config.impr_files', fake_importlib_files):
+ yield etc_path
+
+
+def _get_entry_points_and_etc_paths(
+ tmp_path: Path,
+ entry_point_names: dict[str, list[str]]
+) -> tuple[Path, dict[str, list[EntryPoint]], dict[str, Path]]:
+ module_path = tmp_path / "satpy_plugin"
+ etc_path = module_path / "etc"
+ etc_path.mkdir(parents=True, exist_ok=True)
+ entry_points: dict[str, list[EntryPoint]] = {}
+ entry_point_module_paths: dict[str, Path] = {}
+ for ep_group, entry_point_values in entry_point_names.items():
+ entry_points[ep_group] = []
+ for entry_point_value in entry_point_values:
+ parts = [part.strip() for part in entry_point_value.split("=")]
+ ep_name = parts[0]
+ ep_value = parts[1]
+ ep_module = ep_value.split(":")[0].strip()
+ ep = EntryPoint(name=ep_name, group=ep_group, value=ep_value)
+ entry_points[ep_group].append(ep)
+ entry_point_module_paths[ep_module] = module_path
+ return etc_path, entry_points, entry_point_module_paths
+
+
+def _create_fake_iter_entry_points(
+        entry_points: dict[str, list[EntryPoint]],
+) -> Callable[[], dict[str, list[EntryPoint]]]:
+ def _fake_iter_entry_points() -> dict:
+ return entry_points
+ return _fake_iter_entry_points
+
+
+def _create_fake_importlib_files(module_paths: dict[str, Path]) -> Callable[[str], Path]:
+ def _fake_importlib_files(module_name: str) -> Path:
+ return module_paths[module_name]
+ return _fake_importlib_files
+
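
# The patches above target `satpy._config.entry_points` and
# `satpy._config.impr_files`, so plugin discovery presumably looks up an entry
# point group, resolves each entry point's module to an installed path, and
# appends "etc". A sketch under that assumption (not satpy's literal code):
from importlib.metadata import entry_points
from importlib.resources import files as impr_files


def plugin_config_dirs(group: str) -> list[str]:
    dirs = []
    for ep in entry_points().get(group, []):  # dict-style access, as the fake provides
        module = ep.value.split(":")[0].strip()
        dirs.append(str(impr_files(module) / "etc"))
    return dirs
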
+
+@pytest.fixture
+def fake_composite_plugin_etc_path(tmp_path: Path) -> Iterator[Path]:
+ """Create a fake plugin entry point with a fake compositor YAML configuration file."""
+ yield from _create_yamlbased_plugin(
+ tmp_path,
+ "composites",
+ "fake_sensor.yaml",
+ _write_fake_composite_yaml,
+ )
+
+
+def _write_fake_composite_yaml(yaml_filename: str) -> None:
+ with open(yaml_filename, "w") as comps_file:
+ comps_file.write("""
+ sensor_name: visir/fake_sensor
+
+ composites:
+ fake_composite:
+ compositor: !!python/name:satpy.composites.GenericCompositor
+ prerequisites:
+ - 3.9
+ - 10.8
+ - 12.0
+ standard_name: fake composite
+
+ """)
+
+
+@pytest.fixture
+def fake_reader_plugin_etc_path(tmp_path: Path) -> Iterator[Path]:
+ """Create a fake plugin entry point with a fake reader YAML configuration file."""
+ yield from _create_yamlbased_plugin(
+ tmp_path,
+ "readers",
+ "fake_reader.yaml",
+ _write_fake_reader_yaml,
+ )
+
+
+def _write_fake_reader_yaml(yaml_filename: str) -> None:
+ reader_name = os.path.splitext(os.path.basename(yaml_filename))[0]
+ with open(yaml_filename, "w") as comps_file:
+ comps_file.write(f"""
+reader:
+ name: {reader_name}
+ sensors: [fake_sensor]
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader
+datasets: {{}}
+""")
+
+
+@pytest.fixture
+def fake_writer_plugin_etc_path(tmp_path: Path) -> Iterator[Path]:
+ """Create a fake plugin entry point with a fake writer YAML configuration file."""
+ yield from _create_yamlbased_plugin(
+ tmp_path,
+ "writers",
+ "fake_writer.yaml",
+ _write_fake_writer_yaml,
+ )
+
+
+def _write_fake_writer_yaml(yaml_filename: str) -> None:
+ writer_name = os.path.splitext(os.path.basename(yaml_filename))[0]
+ with open(yaml_filename, "w") as comps_file:
+ comps_file.write(f"""
+writer:
+ name: {writer_name}
+ writer: !!python/name:satpy.writers.Writer
+""")
+
+
+@pytest.fixture
+def fake_enh_plugin_etc_path(tmp_path: Path) -> Iterator[Path]:
+    """Create a fake plugin entry point with fake enhancement YAML configuration files.
+
+ This creates a ``fake_sensor.yaml`` and ``generic.yaml`` enhancement configuration.
+
+ """
+ yield from _create_yamlbased_plugin(
+ tmp_path,
+ "enhancements",
+ "fake_sensor.yaml",
+ _write_fake_enh_yamls,
+ )
+
+
+def _write_fake_enh_yamls(yaml_filename: str) -> None:
+ with open(yaml_filename, "w") as comps_file:
+ comps_file.write("""
+enhancements:
+ some_custom_plugin_enh:
+ name: fake_name
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: -100.0
+ max_stretch: 0.0
+""")
+
+ generic_filename = os.path.join(os.path.dirname(yaml_filename), "generic.yaml")
+ with open(generic_filename, "w") as comps_file:
+ comps_file.write("""
+enhancements:
+ default:
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs:
+ stretch: crude
+ min_stretch: -1.0
+ max_stretch: 1.0
+""")
+
+
+def _create_yamlbased_plugin(
+ tmp_path: Path,
+ component_type: str,
+ yaml_name: str,
+ yaml_func: Callable[[str], None]
+) -> Iterator[Path]:
+ entry_point_dict = {f"satpy.{component_type}": [f"example_{component_type} = satpy_plugin"]}
+ with fake_plugin_etc_path(tmp_path, entry_point_dict) as plugin_etc_path:
+ comps_dir = os.path.join(plugin_etc_path, component_type)
+ os.makedirs(comps_dir, exist_ok=True)
+ comps_filename = os.path.join(comps_dir, yaml_name)
+ yaml_func(comps_filename)
+ yield plugin_etc_path
+
+
+class TestPluginsConfigs:
"""Test that plugins are working."""
- @mock.patch('satpy._config.pkg_resources.iter_entry_points')
- def test_get_plugin_configs(self, iter_entry_points):
+ def test_get_plugin_configs(self, fake_composite_plugin_etc_path):
"""Check that the plugin configs are looked for."""
- import pkg_resources
- ep = pkg_resources.EntryPoint.parse('example_composites = satpy_cpe')
- ep.dist = pkg_resources.Distribution.from_filename('satpy_cpe-0.0.0-py3.8.egg')
- ep.dist.module_path = os.path.join(os.path.sep + 'bla', 'bla')
- iter_entry_points.return_value = [ep]
-
- import satpy
from satpy._config import get_entry_points_config_dirs
- # don't let user env vars affect results
+
with satpy.config.set(config_path=[]):
dirs = get_entry_points_config_dirs('satpy.composites')
- self.assertListEqual(dirs, [os.path.join(ep.dist.module_path, 'satpy_cpe', 'etc')])
+ assert dirs == [str(fake_composite_plugin_etc_path)]
+
+ def test_load_entry_point_composite(self, fake_composite_plugin_etc_path):
+ """Test that composites can be loaded from plugin entry points."""
+ with satpy.config.set(config_path=[]):
+ compositors, _ = load_compositor_configs_for_sensors(["fake_sensor"])
+ assert "fake_sensor" in compositors
+ comp_dict = DatasetDict(compositors["fake_sensor"])
+ assert "fake_composite" in comp_dict
+ comp_obj = comp_dict["fake_composite"]
+ assert comp_obj.attrs["name"] == "fake_composite"
+ assert comp_obj.attrs["prerequisites"] == [3.9, 10.8, 12.0]
+
+ @pytest.mark.parametrize("specified_reader", [None, "fake_reader"])
+ def test_plugin_reader_configs(self, fake_reader_plugin_etc_path, specified_reader):
+ """Test that readers can be loaded from plugin entry points."""
+ from satpy.readers import configs_for_reader
+ reader_yaml_path = fake_reader_plugin_etc_path / "readers" / "fake_reader.yaml"
+ self._get_and_check_reader_writer_configs(specified_reader, configs_for_reader, reader_yaml_path)
+
+ def test_plugin_reader_available_readers(self, fake_reader_plugin_etc_path):
+ """Test that readers can be loaded from plugin entry points."""
+ from satpy.readers import available_readers
+ self._check_available_component(available_readers, "fake_reader")
+
+ @pytest.mark.parametrize("specified_writer", [None, "fake_writer"])
+ def test_plugin_writer_configs(self, fake_writer_plugin_etc_path, specified_writer):
+ """Test that writers can be loaded from plugin entry points."""
+ from satpy.writers import configs_for_writer
+ writer_yaml_path = fake_writer_plugin_etc_path / "writers" / "fake_writer.yaml"
+ self._get_and_check_reader_writer_configs(specified_writer, configs_for_writer, writer_yaml_path)
+
+ def test_plugin_writer_available_writers(self, fake_writer_plugin_etc_path):
+        """Test that writers can be loaded from plugin entry points."""
+ from satpy.writers import available_writers
+ self._check_available_component(available_writers, "fake_writer")
+
+ @staticmethod
+ def _get_and_check_reader_writer_configs(specified_component, configs_func, exp_yaml):
+ with satpy.config.set(config_path=[]):
+ configs = list(configs_func(specified_component))
+ assert any(str(exp_yaml) in config_list for config_list in configs)
+
+ @staticmethod
+ def _check_available_component(available_func, exp_component):
+ with satpy.config.set(config_path=[]):
+ available_components = available_func()
+ assert exp_component in available_components
+
+ @pytest.mark.parametrize(
+ ("sensor_name", "exp_result"),
+ [
+ ("fake_sensor", 1.0), # uses the sensor specific entry
+ ("fake_sensor2", 0.5), # uses the generic.yaml default
+ ]
+ )
+ def test_plugin_enhancements_generic_sensor(self, fake_enh_plugin_etc_path, sensor_name, exp_result):
+ """Test that enhancements from a plugin are available."""
+ import dask.array as da
+ import numpy as np
+ import xarray as xr
+ from trollimage.xrimage import XRImage
+
+ from satpy.writers import Enhancer
+
+ data_arr = xr.DataArray(
+ da.zeros((10, 10), dtype=np.float32),
+ dims=("y", "x"),
+ attrs={
+ "sensor": {sensor_name},
+ "name": "fake_name",
+ })
+ img = XRImage(data_arr)
+
+ enh = Enhancer()
+ enh.add_sensor_enhancements(data_arr.attrs["sensor"])
+ enh.apply(img, **img.data.attrs)
+
+ res_data = img.data.values
+        np.testing.assert_allclose(res_data, exp_result)


 class TestConfigObject:
@@ -117,8 +386,10 @@ class TestConfigObject:
def test_custom_config_file(self):
"""Test adding a custom configuration file using SATPY_CONFIG."""
import tempfile
- import yaml
from importlib import reload
+
+ import yaml
+
import satpy
my_config_dict = {
'cache_dir': "/path/to/cache",
@@ -137,6 +408,7 @@ def test_custom_config_file(self):
def test_deprecated_env_vars(self):
"""Test that deprecated variables are mapped to new config."""
from importlib import reload
+
import satpy
old_vars = {
'PPP_CONFIG_DIR': '/my/ppp/config/dir',
@@ -152,17 +424,17 @@ def test_deprecated_env_vars(self):
def test_config_path_multiple(self):
"""Test that multiple config paths are accepted."""
from importlib import reload
+
import satpy
+ exp_paths, env_paths = _os_specific_multipaths()
old_vars = {
- 'SATPY_CONFIG_PATH': '/my/configs1:/my/configs2:/my/configs3',
+ 'SATPY_CONFIG_PATH': env_paths,
}
with mock.patch.dict('os.environ', old_vars):
reload(satpy._config)
reload(satpy)
- assert satpy.config.get('config_path') == ['/my/configs1',
- '/my/configs2',
- '/my/configs3']
+ assert satpy.config.get('config_path') == exp_paths
def test_config_path_multiple_load(self):
"""Test that config paths from subprocesses load properly.
@@ -172,9 +444,11 @@ def test_config_path_multiple_load(self):
modified variable.
"""
from importlib import reload
+
import satpy
+ exp_paths, env_paths = _os_specific_multipaths()
old_vars = {
- 'SATPY_CONFIG_PATH': '/my/configs1:/my/configs2:/my/configs3',
+ 'SATPY_CONFIG_PATH': env_paths,
}
with mock.patch.dict('os.environ', old_vars):
@@ -185,13 +459,12 @@ def test_config_path_multiple_load(self):
# load the updated env variable and parse it again.
reload(satpy._config)
reload(satpy)
- assert satpy.config.get('config_path') == ['/my/configs1',
- '/my/configs2',
- '/my/configs3']
+ assert satpy.config.get('config_path') == exp_paths
def test_bad_str_config_path(self):
"""Test that a str config path isn't allowed."""
from importlib import reload
+
import satpy
old_vars = {
'SATPY_CONFIG_PATH': '/my/configs1',
@@ -206,3 +479,11 @@ def test_bad_str_config_path(self):
# strings are not allowed, lists are
with satpy.config.set(config_path='/single/string/paths/are/bad'):
pytest.raises(ValueError, satpy._config.get_config_path_safe)
+
+
+def _os_specific_multipaths():
+ exp_paths = ['/my/configs1', '/my/configs2', '/my/configs3']
+ if sys.platform.startswith("win"):
+ exp_paths = ["C:" + p for p in exp_paths]
+ path_str = os.pathsep.join(exp_paths)
+ return exp_paths, path_str
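
# The helper above exists because SATPY_CONFIG_PATH is split on the
# OS-specific separator (":" on POSIX, ";" on Windows, where the expected
# paths also carry a drive letter). Illustration:
import os

env_value = os.pathsep.join(["/my/configs1", "/my/configs2"])
assert env_value.split(os.pathsep) == ["/my/configs1", "/my/configs2"]
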
diff --git a/satpy/tests/test_crefl_utils.py b/satpy/tests/test_crefl_utils.py
index b307ca5e46..1e5da8cd9a 100644
--- a/satpy/tests/test_crefl_utils.py
+++ b/satpy/tests/test_crefl_utils.py
@@ -25,11 +25,14 @@ class TestCreflUtils(unittest.TestCase):
def test_get_atm_variables_abi(self):
"""Test getting atmospheric variables for ABI."""
import numpy as np
- from satpy.composites.crefl_utils import get_atm_variables_abi
- sphalb, rhoray, TtotraytH2O, tOG = get_atm_variables_abi(0.17690244, 6.123234e-17, 530.61332168, 405.,
- 21.71342113, 77.14385758, 56.214566960,
- 0.0043149700000000004, 0.0037296,
- 0.014107995000000002, 0.052349)
+
+ from satpy.modifiers._crefl_utils import _ABIAtmosphereVariables
+ atm_vars = _ABIAtmosphereVariables(
+ 21.71342113, 77.14385758, 56.214566960,
+ 0.17690244, 6.123234e-17, 530.61332168, 405.,
+ 0.0043149700000000004, 0.0037296, 0.014107995000000002, 0.052349,
+ )
+ sphalb, rhoray, TtotraytH2O, tOG = atm_vars()
self.assertLess(abs(np.array(sphalb) - 0.045213532544630494), 1e-10)
self.assertLess(abs(rhoray - 2.2030281148621356), 1e-10)
self.assertLess(abs(TtotraytH2O - 0.30309880915889087), 1e-10)
diff --git a/satpy/tests/test_data_download.py b/satpy/tests/test_data_download.py
index 2093ccaae5..69fe8377d5 100644
--- a/satpy/tests/test_data_download.py
+++ b/satpy/tests/test_data_download.py
@@ -22,8 +22,8 @@
import pytest
import yaml
-from satpy.modifiers import ModifierBase
from satpy.aux_download import DataDownloadMixin
+from satpy.modifiers import ModifierBase
pooch = pytest.importorskip("pooch")
@@ -269,8 +269,8 @@ def test_no_downloads_in_tests(self):
def test_download_script(self):
"""Test basic functionality of the download script."""
- from satpy.aux_download import retrieve_all_cmd
import satpy
+ from satpy.aux_download import retrieve_all_cmd
file_registry = {}
file_urls = {}
with satpy.config.set(config_path=[self.tmpdir]), \
diff --git a/satpy/tests/test_dataset.py b/satpy/tests/test_dataset.py
index e995308835..b79c5b53e4 100644
--- a/satpy/tests/test_dataset.py
+++ b/satpy/tests/test_dataset.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2015-2019 Satpy developers
+# Copyright (c) 2015-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -23,7 +23,8 @@
import numpy as np
import pytest
-from satpy.dataset.dataid import DataQuery, DataID, WavelengthRange, ModifierTuple, minimal_default_keys_config
+from satpy.dataset.dataid import DataID, DataQuery, ModifierTuple, WavelengthRange, minimal_default_keys_config
+from satpy.readers.pmw_channels_definitions import FrequencyDoubleSideBand, FrequencyQuadrupleSideBand, FrequencyRange
from satpy.tests.utils import make_cid, make_dataid, make_dsq
@@ -32,10 +33,9 @@ class TestDataID(unittest.TestCase):
def test_basic_init(self):
"""Test basic ways of creating a DataID."""
- from satpy.dataset.dataid import (
- DataID,
- default_id_keys_config as dikc,
- minimal_default_keys_config as mdkc)
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
+ from satpy.dataset.dataid import minimal_default_keys_config as mdkc
did = DataID(dikc, name="a")
assert did['name'] == 'a'
@@ -54,12 +54,14 @@ def test_basic_init(self):
def test_init_bad_modifiers(self):
"""Test that modifiers are a tuple."""
- from satpy.dataset.dataid import DataID, default_id_keys_config as dikc
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
self.assertRaises(TypeError, DataID, dikc, name="a", modifiers="str")
def test_compare_no_wl(self):
"""Compare fully qualified wavelength ID to no wavelength ID."""
- from satpy.dataset.dataid import DataID, default_id_keys_config as dikc
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3))
d2 = DataID(dikc, name="a", wavelength=None)
@@ -69,13 +71,15 @@ def test_compare_no_wl(self):
def test_bad_calibration(self):
"""Test that asking for a bad calibration fails."""
- from satpy.dataset.dataid import DataID, default_id_keys_config as dikc
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
with pytest.raises(ValueError):
DataID(dikc, name='C05', calibration='_bad_')
def test_is_modified(self):
"""Test that modifications are detected properly."""
- from satpy.dataset.dataid import DataID, default_id_keys_config as dikc
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',))
d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple())
@@ -84,7 +88,8 @@ def test_is_modified(self):
def test_create_less_modified_query(self):
"""Test that modifications are popped correctly."""
- from satpy.dataset.dataid import DataID, default_id_keys_config as dikc
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
d1 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=('hej',))
d2 = DataID(dikc, name="a", wavelength=(0.1, 0.2, 0.3), modifiers=tuple())
@@ -133,13 +138,14 @@ def test_combine_times_without_averaging(self):
def test_combine_arrays(self):
"""Test the combine_metadata with arrays."""
- from satpy.dataset.metadata import combine_metadata
from numpy import arange, ones
from xarray import DataArray
+
+ from satpy.dataset.metadata import combine_metadata
dts = [
- {"quality": (arange(25) % 2).reshape(5, 5).astype("?")},
- {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")},
- {"quality": ones((5, 5,), "?")},
+ {"quality": (arange(25) % 2).reshape(5, 5).astype("?")},
+ {"quality": (arange(1, 26) % 3).reshape(5, 5).astype("?")},
+ {"quality": ones((5, 5,), "?")},
]
assert "quality" not in combine_metadata(*dts)
dts2 = [{"quality": DataArray(d["quality"])} for d in dts]
@@ -149,22 +155,22 @@ def test_combine_arrays(self):
assert "quality" not in combine_metadata(*dts3)
# check cases with repeated arrays
dts4 = [
- {"quality": dts[0]["quality"]},
- {"quality": dts[0]["quality"]},
- ]
+ {"quality": dts[0]["quality"]},
+ {"quality": dts[0]["quality"]},
+ ]
assert "quality" in combine_metadata(*dts4)
dts5 = [
- {"quality": dts3[0]["quality"]},
- {"quality": dts3[0]["quality"]},
- ]
+ {"quality": dts3[0]["quality"]},
+ {"quality": dts3[0]["quality"]},
+ ]
assert "quality" in combine_metadata(*dts5)
# check with other types
dts6 = [
- DataArray(arange(5), attrs=dts[0]),
- DataArray(arange(5), attrs=dts[0]),
- DataArray(arange(5), attrs=dts[1]),
- object()
- ]
+ DataArray(arange(5), attrs=dts[0]),
+ DataArray(arange(5), attrs=dts[0]),
+ DataArray(arange(5), attrs=dts[1]),
+ object()
+ ]
assert "quality" not in combine_metadata(*dts6)
def test_combine_lists_identical(self):
@@ -233,8 +239,9 @@ def test_combine_numpy_arrays(self):
def test_combine_dask_arrays(self):
"""Test combining values that are dask arrays."""
- from satpy.dataset.metadata import combine_metadata
import dask.array as da
+
+ from satpy.dataset.metadata import combine_metadata
test_metadata = [{'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))},
{'valid_range': da.from_array(np.array([0., 0.00032], dtype=np.float32))}]
result = combine_metadata(*test_metadata)
@@ -371,7 +378,7 @@ def test_combine_dicts_different(test_mda):
def test_dataid():
"""Test the DataID object."""
- from satpy.dataset.dataid import DataID, WavelengthRange, ModifierTuple, ValueList
+ from satpy.dataset.dataid import DataID, ModifierTuple, ValueList, WavelengthRange
# Check that enum is translated to type.
did = make_dataid()
@@ -412,20 +419,20 @@ def test_dataid():
# Check inequality
default_id_keys_config = {'name': None,
'wavelength': {
- 'type': WavelengthRange,
+ 'type': WavelengthRange,
},
'resolution': None,
'calibration': {
- 'enum': [
- 'reflectance',
- 'brightness_temperature',
- 'radiance',
- 'counts'
- ]
+ 'enum': [
+ 'reflectance',
+ 'brightness_temperature',
+ 'radiance',
+ 'counts'
+ ]
},
'modifiers': {
- 'default': ModifierTuple(),
- 'type': ModifierTuple,
+ 'default': ModifierTuple(),
+ 'type': ModifierTuple,
},
}
assert DataID(default_id_keys_config, wavelength=10) != DataID(default_id_keys_config, name="VIS006")
@@ -433,7 +440,7 @@ def test_dataid():
def test_dataid_equal_if_enums_different():
"""Check that dataids with different enums but same items are equal."""
- from satpy.dataset.dataid import DataID, WavelengthRange, ModifierTuple
+ from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange
id_keys_config1 = {'name': None,
'wavelength': {
'type': WavelengthRange,
@@ -476,9 +483,11 @@ def test_dataid_equal_if_enums_different():
def test_dataid_copy():
"""Test copying a DataID."""
- from satpy.dataset.dataid import DataID, default_id_keys_config as dikc
from copy import deepcopy
+ from satpy.dataset.dataid import DataID
+ from satpy.dataset.dataid import default_id_keys_config as dikc
+
did = DataID(dikc, name="a", resolution=1000)
did2 = deepcopy(did)
assert did2 == did
@@ -487,12 +496,30 @@ def test_dataid_copy():
def test_dataid_pickle():
"""Test dataid pickling roundtrip."""
- from satpy.tests.utils import make_dataid
import pickle
+
+ from satpy.tests.utils import make_dataid
did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance')
     assert did == pickle.loads(pickle.dumps(did))


+def test_dataid_elements_picklable():
+ """Test individual elements of DataID can be pickled.
+
+ In some cases, like in the base reader classes, the elements of a DataID
+ are extracted and stored in a separate dictionary. This means that the
+ internal/fancy pickle handling of DataID does not play a part.
+
+ """
+ import pickle
+
+ from satpy.tests.utils import make_dataid
+ did = make_dataid(name='hi', wavelength=(10, 11, 12), resolution=1000, calibration='radiance')
+ for value in did.values():
+ pickled_value = pickle.loads(pickle.dumps(value))
+ assert value == pickled_value
+
+
class TestDataQuery:
"""Test case for data queries."""
@@ -628,11 +655,269 @@ def test_sort_dataids_with_different_set_of_keys(self):
assert distances[0] < distances[1]
         assert distances[1] < distances[2]

+    def test_seviri_hrv_has_priority_over_vis008(self):
+ """Check that the HRV channel has priority over VIS008 when querying 0.8µm."""
+ dids = [DataID(self.default_id_keys_config, name='HRV',
+ wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869,
+ calibration="reflectance", modifiers=()),
+ DataID(self.default_id_keys_config, name='HRV',
+ wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869,
+ calibration="radiance", modifiers=()),
+ DataID(self.default_id_keys_config, name='HRV',
+ wavelength=WavelengthRange(min=0.5, central=0.7, max=0.9, unit='µm'), resolution=1000.134348869,
+ calibration="counts", modifiers=()),
+ DataID(self.default_id_keys_config, name='VIS006',
+ wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'),
+ resolution=3000.403165817, calibration="reflectance", modifiers=()),
+ DataID(self.default_id_keys_config, name='VIS006',
+ wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'),
+ resolution=3000.403165817, calibration="radiance", modifiers=()),
+ DataID(self.default_id_keys_config, name='VIS006',
+ wavelength=WavelengthRange(min=0.56, central=0.635, max=0.71, unit='µm'),
+ resolution=3000.403165817, calibration="counts", modifiers=()),
+ DataID(self.default_id_keys_config, name='VIS008',
+ wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'),
+ resolution=3000.403165817, calibration="reflectance", modifiers=()),
+ DataID(self.default_id_keys_config, name='VIS008',
+ wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'),
+ resolution=3000.403165817, calibration="radiance", modifiers=()),
+ DataID(self.default_id_keys_config, name='VIS008',
+ wavelength=WavelengthRange(min=0.74, central=0.81, max=0.88, unit='µm'),
+ resolution=3000.403165817, calibration="counts", modifiers=())]
+ dq = DataQuery(wavelength=0.8)
+ res, distances = dq.sort_dataids(dids)
+ assert res[0].name == "HRV"
+
+
+def test_frequency_quadruple_side_band_class_method_convert():
+    """Test the frequency quadruple side band object: test the class method convert."""
+ frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)
+
+ res = frq_qdsb.convert(57.37)
+ assert res == 57.37
+
+ res = frq_qdsb.convert({'central': 57.0, 'side': 0.322, 'sideside': 0.05, 'bandwidth': 0.036})
+ assert res == FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)
+
+
+def test_frequency_quadruple_side_band_channel_str():
+ """Test the frequency quadruple side band object: test the band description."""
+ frq_qdsb1 = FrequencyQuadrupleSideBand(57.0, 0.322, 0.05, 0.036)
+ frq_qdsb2 = FrequencyQuadrupleSideBand(57000, 322, 50, 36, 'MHz')
+
+ assert str(frq_qdsb1) == "central=57.0 GHz ±0.322 ±0.05 width=0.036 GHz"
+ assert str(frq_qdsb2) == "central=57000 MHz ±322 ±50 width=36 MHz"
+
+
+def test_frequency_quadruple_side_band_channel_equality():
+ """Test the frequency quadruple side band object: check if two bands are 'equal'."""
+ frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)
+ assert frq_qdsb is not None
+ assert frq_qdsb < FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.04)
+ assert frq_qdsb < FrequencyQuadrupleSideBand(58, 0.322, 0.05, 0.036)
+    assert frq_qdsb < (58, 0.322, 0.05, 0.036)
+ assert frq_qdsb > FrequencyQuadrupleSideBand(57, 0.322, 0.04, 0.01)
+ assert frq_qdsb > None
+ assert (frq_qdsb < None) is False
+
+ assert 57 != frq_qdsb
+ assert 57.372 == frq_qdsb
+ assert 56.646 == frq_qdsb
+ assert 56.71 == frq_qdsb
+
+ assert frq_qdsb != FrequencyQuadrupleSideBand(57, 0.322, 0.1, 0.040)
+
+ frq_qdsb = None
+ assert FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036) != frq_qdsb
+ assert frq_qdsb < FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.04)
+
+
+def test_frequency_quadruple_side_band_channel_distances():
+ """Test the frequency quadruple side band object: get the distance between two bands."""
+ frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)
+    mydist = frq_qdsb.distance([57, 0.322, 0.05, 0.036])
+    assert mydist == 0
+
+ frq_dict = {'central': 57, 'side': 0.322, 'sideside': 0.05,
+ 'bandwidth': 0.036, 'unit': 'GHz'}
+ mydist = frq_qdsb.distance(frq_dict)
+ assert mydist == np.inf
+
+ mydist = frq_qdsb.distance(57.372)
+ assert mydist == 0.0
+
+ mydist = frq_qdsb.distance(FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036))
+ assert mydist == 0.0
+
+ mydist = frq_qdsb.distance(57.38)
+ np.testing.assert_almost_equal(mydist, 0.008)
+
+ mydist = frq_qdsb.distance(57)
+ assert mydist == np.inf
+
+ mydist = frq_qdsb.distance((57, 0.322, 0.05, 0.018))
+ assert mydist == np.inf
+
+
+def test_frequency_quadruple_side_band_channel_containment():
+ """Test the frequency quadruple side band object: check if one band contains another."""
+ frq_qdsb = FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.036)
+
+ assert 57 not in frq_qdsb
+ assert 57.373 in frq_qdsb
+
+ with pytest.raises(NotImplementedError):
+ assert frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)
+
+ frq_qdsb = None
+ assert (frq_qdsb in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)) is False
+
+ assert '57' not in FrequencyQuadrupleSideBand(57, 0.322, 0.05, 0.05)
+
+
+def test_frequency_double_side_band_class_method_convert():
+ """Test the frequency double side band object: test the class method convert."""
+ frq_dsb = FrequencyDoubleSideBand(183, 7, 2)
+
+ res = frq_dsb.convert(185)
+ assert res == 185
+
+ res = frq_dsb.convert({'central': 185, 'side': 7, 'bandwidth': 2})
+ assert res == FrequencyDoubleSideBand(185, 7, 2)
+
+
+def test_frequency_double_side_band_channel_str():
+ """Test the frequency double side band object: test the band description."""
+ frq_dsb1 = FrequencyDoubleSideBand(183, 7, 2)
+ frq_dsb2 = FrequencyDoubleSideBand(183000, 7000, 2000, 'MHz')
+
+ assert str(frq_dsb1) == "central=183 GHz ±7 width=2 GHz"
+ assert str(frq_dsb2) == "central=183000 MHz ±7000 width=2000 MHz"
+
+
+def test_frequency_double_side_band_channel_equality():
+ """Test the frequency double side band object: check if two bands are 'equal'."""
+ frq_dsb = FrequencyDoubleSideBand(183, 7, 2)
+ assert frq_dsb is not None
+ assert 183 != frq_dsb
+ assert 190 == frq_dsb
+ assert 176 == frq_dsb
+ assert 175.5 == frq_dsb
+
+ assert frq_dsb != FrequencyDoubleSideBand(183, 6.5, 3)
+
+ frq_dsb = None
+ assert FrequencyDoubleSideBand(183, 7, 2) != frq_dsb
+
+ assert frq_dsb < FrequencyDoubleSideBand(183, 7, 2)
+
+ assert FrequencyDoubleSideBand(182, 7, 2) < FrequencyDoubleSideBand(183, 7, 2)
+ assert FrequencyDoubleSideBand(184, 7, 2) > FrequencyDoubleSideBand(183, 7, 2)
+
+
+def test_frequency_double_side_band_channel_distances():
+ """Test the frequency double side band object: get the distance between two bands."""
+ frq_dsb = FrequencyDoubleSideBand(183, 7, 2)
+ mydist = frq_dsb.distance(175.5)
+ assert mydist == 0.5
+
+ mydist = frq_dsb.distance(190.5)
+ assert mydist == 0.5
+
+ np.testing.assert_almost_equal(frq_dsb.distance(175.6), 0.4)
+ np.testing.assert_almost_equal(frq_dsb.distance(190.1), 0.1)
+
+ mydist = frq_dsb.distance(185)
+ assert mydist == np.inf
+
+ mydist = frq_dsb.distance((183, 7.0, 2))
+ assert mydist == 0
+
+ mydist = frq_dsb.distance((183, 7.0, 1))
+ assert mydist == 0
+
+ mydist = frq_dsb.distance(FrequencyDoubleSideBand(183, 7.0, 2))
+ assert mydist == 0
+
+
+def test_frequency_double_side_band_channel_containment():
+ """Test the frequency double side band object: check if one band contains another."""
+ frq_range = FrequencyDoubleSideBand(183, 7, 2)
+
+ assert 175.5 in frq_range
+ assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3)
+ assert frq_range not in FrequencyDoubleSideBand(183, 4, 2)
+
+ with pytest.raises(NotImplementedError):
+ assert frq_range in FrequencyDoubleSideBand(183, 6.5, 3, 'MHz')
+
+ frq_range = None
+ assert (frq_range in FrequencyDoubleSideBand(183, 3, 2)) is False
+
+ assert '183' not in FrequencyDoubleSideBand(183, 3, 2)
+
+
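
# The numbers above follow from the double-side-band geometry: a value matches
# when it falls inside either passband [central - side ± bw/2] or
# [central + side ± bw/2]. A sketch of that containment check (an illustration
# of the expected semantics, not satpy's implementation):
def in_double_side_band(value, central, side, bandwidth):
    half = bandwidth / 2.0
    low, high = central - side, central + side
    return (low - half <= value <= low + half) or (high - half <= value <= high + half)


assert in_double_side_band(175.5, 183, 7, 2)    # lower passband is 176 +/- 1
assert not in_double_side_band(185, 183, 7, 2)  # falls between the passbands
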
+def test_frequency_range_class_method_convert():
+ """Test the frequency range object: test the class method convert."""
+ frq_range = FrequencyRange(89, 2)
+
+ res = frq_range.convert(89)
+ assert res == 89
+
+ res = frq_range.convert({'central': 89, 'bandwidth': 2})
+ assert res == FrequencyRange(89, 2)
+
+
+def test_frequency_range_class_method_str():
+ """Test the frequency range object: test the band description."""
+ frq_range1 = FrequencyRange(89, 2)
+ frq_range2 = FrequencyRange(89000, 2000, 'MHz')
+
+ assert str(frq_range1) == "central=89 GHz width=2 GHz"
+ assert str(frq_range2) == "central=89000 MHz width=2000 MHz"
+
+
+def test_frequency_range_channel_equality():
+ """Test the frequency range object: check if two bands are 'equal'."""
+ frqr = FrequencyRange(2, 1)
+ assert frqr is not None
+ assert 1.7 == frqr
+ assert 1.2 != frqr
+ assert frqr == (2, 1)
+
+ assert frqr == (2, 1, 'GHz')
+
+
+def test_frequency_range_channel_containment():
+ """Test the frequency range object: channel containment."""
+ frqr = FrequencyRange(2, 1)
+ assert 1.7 in frqr
+ assert 2.8 not in frqr
+
+ with pytest.raises(NotImplementedError):
+ assert frqr in FrequencyRange(89, 2, 'MHz')
+
+ frqr = None
+ assert (frqr in FrequencyRange(89, 2)) is False
+
+ assert '89' not in FrequencyRange(89, 2)
+
+
+def test_frequency_range_channel_distances():
+ """Test the frequency range object: derive distances between bands."""
+ frqr = FrequencyRange(190.0, 2)
+
+ mydist = frqr.distance(FrequencyRange(190, 2))
+ assert mydist == 0
+ mydist = frqr.distance(FrequencyRange(189.5, 2))
+ assert mydist == np.inf
+ mydist = frqr.distance(189.5)
+ assert mydist == 0.5
+ mydist = frqr.distance(188.0)
+ assert mydist == np.inf
+
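
# The distances above suggest the semantics: for a plain number, distance is
# |value - central| when the value lies inside [central - bw/2, central + bw/2]
# and inf otherwise, while another range object only matches exactly. A sketch
# of the scalar case (illustrative, not satpy's code):
def range_distance(value, central, bandwidth):
    half = bandwidth / 2.0
    if central - half <= value <= central + half:
        return abs(value - central)
    return float("inf")


assert range_distance(189.5, 190.0, 2) == 0.5
assert range_distance(188.0, 190.0, 2) == float("inf")
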
def test_wavelength_range():
"""Test the wavelength range object."""
- from satpy.dataset.dataid import WavelengthRange
-
wr = WavelengthRange(1, 2, 3)
assert 1.2 == wr
assert .9 != wr
@@ -658,11 +943,12 @@ def test_wavelength_range():
assert str(wr) == "2 µm (1-3 µm)"
assert str(wr2) == "2 nm (1-3 nm)"
+ wr = WavelengthRange(10.5, 11.5, 12.5)
+ np.testing.assert_almost_equal(wr.distance(11.1), 0.4)
+
def test_wavelength_range_cf_roundtrip():
"""Test the wavelength range object roundtrip to cf."""
- from satpy.dataset.dataid import WavelengthRange
-
wr = WavelengthRange(1, 2, 3)
assert WavelengthRange.from_cf(wr.to_cf()) == wr
diff --git a/satpy/tests/test_demo.py b/satpy/tests/test_demo.py
index af2a0c5bf3..e805dd3975 100644
--- a/satpy/tests/test_demo.py
+++ b/satpy/tests/test_demo.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2019 Satpy developers
+# Copyright (c) 2019-2021 Satpy developers
#
# This file is part of satpy.
#
@@ -16,10 +16,15 @@
# You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the satpy.demo module."""
+from __future__ import annotations
+import contextlib
+import io
import os
import sys
+import tarfile
import unittest
+from collections import defaultdict
from unittest import mock
@@ -124,7 +129,7 @@ class TestGCPUtils(unittest.TestCase):
@mock.patch('satpy.demo._google_cloud_platform.urlopen')
def test_is_gcp_instance(self, uo):
"""Test is_google_cloud_instance."""
- from satpy.demo._google_cloud_platform import is_google_cloud_instance, URLError
+ from satpy.demo._google_cloud_platform import URLError, is_google_cloud_instance
uo.side_effect = URLError("Test Environment")
self.assertFalse(is_google_cloud_instance())
@@ -180,15 +185,290 @@ class TestAHIDemoDownload:
@mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()})
def test_ahi_full_download(self):
"""Test that the himawari download works as expected."""
- from satpy.demo import download_typhoon_surigae_ahi
from tempfile import gettempdir
+
+ from satpy.demo import download_typhoon_surigae_ahi
files = download_typhoon_surigae_ahi(base_dir=gettempdir())
assert len(files) == 160
@mock.patch.dict(sys.modules, {'s3fs': mock.MagicMock()})
def test_ahi_partial_download(self):
"""Test that the himawari download works as expected."""
- from satpy.demo import download_typhoon_surigae_ahi
from tempfile import gettempdir
+
+ from satpy.demo import download_typhoon_surigae_ahi
files = download_typhoon_surigae_ahi(base_dir=gettempdir(), segments=[4, 9], channels=[1, 2, 3])
assert len(files) == 6
+
+
+def _create_and_populate_dummy_tarfile(fn):
+ """Populate a dummy tarfile with dummy files."""
+ fn.parent.mkdir(exist_ok=True, parents=True)
+ with tarfile.open(fn, mode="x:gz") as tf:
+ for i in range(3):
+ with open(f"fci-rc{i:d}", "w"):
+ pass
+ tf.addfile(tf.gettarinfo(name=f"fci-rc{i:d}"))
+
+
+def test_fci_download(tmp_path, monkeypatch):
+ """Test download of FCI test data."""
+ from satpy.demo import download_fci_test_data
+ monkeypatch.chdir(tmp_path)
+
+ def fake_download_url(url, nm):
+ """Create a dummy tarfile.
+
+ Intended as a drop-in replacement for demo.utils.download_url.
+ """
+ _create_and_populate_dummy_tarfile(nm)
+
+ with mock.patch("satpy.demo.fci.utils.download_url", new=fake_download_url):
+ files = download_fci_test_data(tmp_path)
+ assert len(files) == 3
+ assert files == ["fci-rc0", "fci-rc1", "fci-rc2"]
+ for f in files:
+ assert os.path.exists(f)
+
+
+class _FakeRequest:
+ """Fake object to act like a requests return value when downloading a file."""
+
+ requests_log: list[str] = []
+
+ def __init__(self, url, stream=None):
+ self._filename = os.path.basename(url)
+ self.headers = {}
+ self.requests_log.append(url)
+ del stream # just mimicking requests 'get'
+
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ return
+
+ def raise_for_status(self):
+ return
+
+ def _get_fake_bytesio(self):
+ filelike_obj = io.BytesIO()
+ filelike_obj.write(self._filename.encode("ascii"))
+ filelike_obj.seek(0)
+ return filelike_obj
+
+ def iter_content(self, chunk_size):
+ """Return generator of 'chunk_size' at a time."""
+ bytes_io = self._get_fake_bytesio()
+ x = bytes_io.read(chunk_size)
+ while x:
+ yield x
+ x = bytes_io.read(chunk_size)
+
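
# _FakeRequest mimics just enough of `requests` for a streaming download:
# the context-manager protocol, `raise_for_status`, and `iter_content`. The
# downloader it stands in for (presumably satpy.demo.utils.download_url)
# would follow the standard requests pattern, roughly:
import requests


def download_url(url: str, filename: str) -> None:
    with requests.get(url, stream=True) as resp:
        resp.raise_for_status()
        with open(filename, "wb") as fh:
            for chunk in resp.iter_content(chunk_size=8192):
                fh.write(chunk)
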
+
+@mock.patch('satpy.demo.utils.requests')
+class TestVIIRSSDRDemoDownload:
+ """Test VIIRS SDR downloading."""
+
+ ALL_BAND_PREFIXES = ("SVI01", "SVI02", "SVI03", "SVI04", "SVI05",
+ "SVM01", "SVM02", "SVM03", "SVM04", "SVM05", "SVM06", "SVM07", "SVM08", "SVM09", "SVM10",
+ "SVM11", "SVM12", "SVM13", "SVM14", "SVM15", "SVM16",
+ "SVDNB")
+ ALL_GEO_PREFIXES = ("GITCO", "GMTCO", "GDNBO")
+
+ def test_download(self, _requests, tmpdir):
+ """Test downloading VIIRS SDR data."""
+ from satpy.demo import get_viirs_sdr_20170128_1229
+ _requests.get.side_effect = _FakeRequest
+ with mock_filesystem():
+ files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir))
+ assert len(files) == 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation)
+ self._assert_bands_in_filenames_and_contents(self.ALL_BAND_PREFIXES + self.ALL_GEO_PREFIXES, files, 10)
+
+ def test_do_not_download_the_files_twice(self, _requests, tmpdir):
+ """Test re-downloading VIIRS SDR data."""
+ from satpy.demo import get_viirs_sdr_20170128_1229
+ get_mock = mock.MagicMock()
+ _requests.get.return_value.__enter__ = get_mock
+ with mock_filesystem():
+ files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir))
+ new_files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir))
+
+ total_num_files = 10 * (16 + 5 + 1 + 3) # 10 granules * (5 I bands + 16 M bands + 1 DNB + 3 geolocation)
+ assert len(new_files) == total_num_files
+ assert get_mock.call_count == total_num_files
+ assert new_files == files
+
+ def test_download_channels_num_granules_im(self, _requests, tmpdir):
+ """Test downloading VIIRS SDR I/M data with select granules."""
+ from satpy.demo import get_viirs_sdr_20170128_1229
+ _requests.get.side_effect = _FakeRequest
+ with mock_filesystem():
+ files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir),
+ channels=("I01", "M01"))
+ assert len(files) == 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation)
+ self._assert_bands_in_filenames_and_contents(("SVI01", "SVM01", "GITCO", "GMTCO"), files, 10)
+
+ def test_download_channels_num_granules_im_twice(self, _requests, tmpdir):
+ """Test re-downloading VIIRS SDR I/M data with select granules."""
+ from satpy.demo import get_viirs_sdr_20170128_1229
+ get_mock = mock.MagicMock()
+ _requests.get.return_value.__enter__ = get_mock
+ with mock_filesystem():
+ files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir),
+ channels=("I01", "M01"))
+ num_first_batch = 10 * (1 + 1 + 2) # 10 granules * (1 I band + 1 M band + 2 geolocation)
+ assert len(files) == num_first_batch
+
+ files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir),
+ channels=("I01", "M01"),
+ granules=(2, 3))
+ assert len(files) == 2 * (1 + 1 + 2) # 2 granules * (1 I band + 1 M band + 2 geolocation)
+ assert get_mock.call_count == num_first_batch
+
+ def test_download_channels_num_granules_dnb(self, _requests, tmpdir):
+ """Test downloading and re-downloading VIIRS SDR DNB data with select granules."""
+ from satpy.demo import get_viirs_sdr_20170128_1229
+ _requests.get.side_effect = _FakeRequest
+ with mock_filesystem():
+ files = get_viirs_sdr_20170128_1229(base_dir=str(tmpdir),
+ channels=("DNB",),
+ granules=(5, 6, 7, 8, 9))
+ assert len(files) == 5 * (1 + 1) # 5 granules * (1 DNB + 1 geolocation)
+ self._assert_bands_in_filenames_and_contents(("SVDNB", "GDNBO"), files, 5)
+
+ def _assert_bands_in_filenames_and_contents(self, band_prefixes, filenames, num_files_per_band):
+ self._assert_bands_in_filenames(band_prefixes, filenames, num_files_per_band)
+ self._assert_file_contents(filenames)
+
+ @staticmethod
+ def _assert_bands_in_filenames(band_prefixes, filenames, num_files_per_band):
+ for band_name in band_prefixes:
+ files_for_band = [x for x in filenames if band_name in x]
+ assert files_for_band
+ assert len(set(files_for_band)) == num_files_per_band
+
+ @staticmethod
+ def _assert_file_contents(filenames):
+ for fn in filenames:
+ with open(fn, "rb") as fake_hdf5_file:
+ assert fake_hdf5_file.read().decode("ascii") == os.path.basename(fn)
+
+
+@contextlib.contextmanager
+def mock_filesystem():
+ """Create a mock filesystem, patching `open` and `os.path.isfile`."""
+ class FakeFile:
+ """Fake file based on BytesIO."""
+
+ def __init__(self):
+ self.io = io.BytesIO()
+
+ def __enter__(self):
+ return self.io
+
+ def __exit__(self, *args, **kwargs):
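+ # Rewind on exit so the next open() of this fake file reads from the beginning.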
+ self.io.seek(0)
+
+ fake_fs = defaultdict(FakeFile)
+ mo = mock.mock_open()
+
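+ # Route every open() through the defaultdict so each filename gets its own FakeFile.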
+ def fun(filename, *args, **kwargs):
+ return fake_fs[filename]
+
+ mo.side_effect = fun
+ with mock.patch("builtins.open", mo):
+ with mock.patch("os.path.isfile") as isfile:
+ isfile.side_effect = (lambda target: target in fake_fs)
+ yield
+
+
+def test_fs():
+ """Test the mock filesystem."""
+ with mock_filesystem():
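+ # The fake filesystem ignores the mode argument, so bytes are written and read even in text mode.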
+ with open("somefile", "w") as fd:
+ fd.write(b"bla")
+ with open("someotherfile", "w") as fd:
+ fd.write(b"bli")
+ with open("somefile", "r") as fd:
+ assert fd.read() == b"bla"
+ with open("someotherfile", "r") as fd:
+ assert fd.read() == b"bli"
+ assert os.path.isfile("somefile")
+ assert not os.path.isfile("missingfile")
+
+
+class TestSEVIRIHRITDemoDownload(unittest.TestCase):
+ """Test case for downloading an hrit tarball."""
+
+ def setUp(self):
+ """Set up the test case."""
+ from satpy.demo.seviri_hrit import generate_subset_of_filenames
+ self.subdir = os.path.join(".", "seviri_hrit", "20180228_1500")
+ self.files = generate_subset_of_filenames(base_dir=self.subdir)
+
+ self.patcher = mock.patch('satpy.demo.utils.requests.get', autospec=True)
+ self.get_mock = self.patcher.start()
+
+ _FakeRequest.requests_log = []
+
+ def tearDown(self):
+ """Tear down the test case."""
+ self.patcher.stop()
+
+ def test_download_gets_files_with_contents(self):
+ """Test downloading SEVIRI HRIT data with content."""
+ from satpy.demo import download_seviri_hrit_20180228_1500
+ self.get_mock.side_effect = _FakeRequest
+ with mock_filesystem():
+ files = download_seviri_hrit_20180228_1500()
+ assert len(files) == 114
+ assert set(files) == set(self.files)
+ for the_file in files:
+ with open(the_file, mode="r") as fd:
+ assert fd.read().decode("utf8") == os.path.basename(the_file)
+
+ def test_download_from_zenodo(self):
+ """Test downloading SEVIRI HRIT data from zenodo."""
+ from satpy.demo import download_seviri_hrit_20180228_1500
+ self.get_mock.side_effect = _FakeRequest
+ with mock_filesystem():
+ download_seviri_hrit_20180228_1500()
+ assert _FakeRequest.requests_log[0].startswith("https://zenodo.org")
+
+ def test_download_a_subset_of_files(self):
+ """Test downloading a subset of files."""
+ from satpy.demo import download_seviri_hrit_20180228_1500
+ with mock_filesystem():
+ files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None})
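+ # A list selects specific segment numbers; None selects the single non-segmented file.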
+ assert set(files) == set(os.path.join(self.subdir, filename) for filename in [
+ 'H-000-MSG4__-MSG4________-_________-EPI______-201802281500-__',
+ 'H-000-MSG4__-MSG4________-HRV______-000001___-201802281500-__',
+ 'H-000-MSG4__-MSG4________-HRV______-000002___-201802281500-__',
+ 'H-000-MSG4__-MSG4________-HRV______-000003___-201802281500-__',
+ 'H-000-MSG4__-MSG4________-IR_108___-000001___-201802281500-__',
+ 'H-000-MSG4__-MSG4________-IR_108___-000002___-201802281500-__',
+ ])
+
+ def test_do_not_download_same_file_twice(self):
+ """Test that files are not downloaded twice."""
+ from satpy.demo import download_seviri_hrit_20180228_1500
+ get_mock = mock.MagicMock()
+ self.get_mock.return_value.__enter__ = get_mock
+ with mock_filesystem():
+ files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None})
+ new_files = download_seviri_hrit_20180228_1500(subset={"HRV": [1, 2, 3], "IR_108": [1, 2], "EPI": None})
+ assert set(files) == set(new_files)
+ assert get_mock.call_count == 6
+
+ def test_download_to_output_directory(self):
+ """Test downloading to an output directory."""
+ from tempfile import gettempdir
+
+ from satpy.demo import download_seviri_hrit_20180228_1500
+ with mock_filesystem():
+ base_dir = gettempdir()
+ files = download_seviri_hrit_20180228_1500(base_dir=base_dir)
+ assert files[0].startswith(base_dir)
diff --git a/satpy/tests/test_dependency_tree.py b/satpy/tests/test_dependency_tree.py
index 4458deaee7..a544b6e694 100644
--- a/satpy/tests/test_dependency_tree.py
+++ b/satpy/tests/test_dependency_tree.py
@@ -121,13 +121,12 @@ class TestMultipleResolutionSameChannelDependency(unittest.TestCase):
def test_modis_overview_1000m(self):
"""Test a modis overview dependency calculation with resolution fixed to 1000m."""
- from satpy._config import PACKAGE_CONFIG_PATH
- from satpy.readers.yaml_reader import FileYAMLReader
-
from satpy import DataQuery
+ from satpy._config import PACKAGE_CONFIG_PATH
from satpy.composites import GenericCompositor
- from satpy.modifiers.geometry import SunZenithCorrector
from satpy.dataset import DatasetDict
+ from satpy.modifiers.geometry import SunZenithCorrector
+ from satpy.readers.yaml_reader import FileYAMLReader
config_file = os.path.join(PACKAGE_CONFIG_PATH, 'readers', 'modis_l1b.yaml')
self.reader_instance = FileYAMLReader.from_config_files(config_file)
@@ -173,8 +172,8 @@ class TestMultipleSensors(unittest.TestCase):
def setUp(self):
"""Set up the test tree."""
from satpy.composites import CompositeBase
- from satpy.modifiers import ModifierBase
from satpy.dataset.data_dict import DatasetDict
+ from satpy.modifiers import ModifierBase
class _FakeCompositor(CompositeBase):
def __init__(self, ret_val, *args, **kwargs):
diff --git a/satpy/tests/test_file_handlers.py b/satpy/tests/test_file_handlers.py
index a1371b0495..46f4a16784 100644
--- a/satpy/tests/test_file_handlers.py
+++ b/satpy/tests/test_file_handlers.py
@@ -18,11 +18,28 @@
"""test file handler baseclass."""
import unittest
+from datetime import datetime, timedelta
from unittest import mock
import numpy as np
+import pytest
-from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.file_handlers import BaseFileHandler, open_dataset
+from satpy.tests.utils import FakeFileHandler
+
+
+def test_open_dataset():
+ """Test xr.open_dataset wrapper."""
+ fn = mock.MagicMock()
+ str_file_path = "path/to/file.nc"
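+ # The MagicMock stands in for a satpy file object: open_dataset should call its
+ # .open() method and hand the result to xarray, while plain strings pass through.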
+ with mock.patch('xarray.open_dataset') as xr_open:
+ _ = open_dataset(fn, decode_cf=True, chunks=500)
+ fn.open.assert_called_once_with()
+ xr_open.assert_called_once_with(fn.open(), decode_cf=True, chunks=500)
+
+ xr_open.reset_mock()
+ _ = open_dataset(str_file_path, decode_cf=True, chunks=500)
+ xr_open.assert_called_once_with(str_file_path, decode_cf=True, chunks=500)
class TestBaseFileHandler(unittest.TestCase):
@@ -30,8 +47,6 @@ class TestBaseFileHandler(unittest.TestCase):
def setUp(self):
"""Set up the test."""
- self._old_set = BaseFileHandler.__abstractmethods__
- BaseFileHandler._abstractmethods__ = set()
self.fh = BaseFileHandler(
'filename', {'filename_info': 'bla'}, 'filetype_info')
@@ -141,6 +156,28 @@ def test_combine_orbital_parameters(self):
# Empty
self.fh.combine_info([{}])
+ def test_combine_time_parameters(self):
+ """Combine times in 'time_parameters."""
+ time_params1 = {
+ 'nominal_start_time': datetime(2020, 1, 1, 12, 0, 0),
+ 'nominal_end_time': datetime(2020, 1, 1, 12, 2, 30),
+ 'observation_start_time': datetime(2020, 1, 1, 12, 0, 2, 23821),
+ 'observation_end_time': datetime(2020, 1, 1, 12, 2, 23, 12348),
+ }
+ time_params2 = {}
+ time_shift = timedelta(seconds=1.5)
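+ # The combined result should keep the earliest start times and the latest end times.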
+ for key, value in time_params1.items():
+ time_params2[key] = value + time_shift
+ res = self.fh.combine_info([
+ {'time_parameters': time_params1},
+ {'time_parameters': time_params2}
+ ])
+ res_time_params = res['time_parameters']
+ assert res_time_params['nominal_start_time'] == datetime(2020, 1, 1, 12, 0, 0)
+ assert res_time_params['nominal_end_time'] == datetime(2020, 1, 1, 12, 2, 31, 500000)
+ assert res_time_params['observation_start_time'] == datetime(2020, 1, 1, 12, 0, 2, 23821)
+ assert res_time_params['observation_end_time'] == datetime(2020, 1, 1, 12, 2, 24, 512348)
+
def test_file_is_kept_intact(self):
"""Test that the file object passed (string, path, or other) is kept intact."""
open_file = mock.MagicMock()
@@ -152,6 +189,19 @@ def test_file_is_kept_intact(self):
bfh = BaseFileHandler(filename, {'filename_info': 'bla'}, 'filetype_info')
assert isinstance(bfh.filename, Path)
- def tearDown(self):
- """Tear down the test."""
- BaseFileHandler.__abstractmethods__ = self._old_set
+
+@pytest.mark.parametrize(
+ ("file_type", "ds_file_type", "exp_result"),
+ [
+ ("fake1", "fake1", True),
+ ("fake1", ["fake1"], True),
+ ("fake1", ["fake1", "fake2"], True),
+ ("fake1", ["fake2"], None),
+ ("fake1", "fake2", None),
+ ("fake1", "fake1_with_suffix", None),
+ ]
+)
+def test_file_type_match(file_type, ds_file_type, exp_result):
+ """Test that file type matching uses exactly equality."""
+ fh = FakeFileHandler("some_file.txt", {}, {"file_type": file_type})
+ assert fh.file_type_matches(ds_file_type) is exp_result
diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py
index dc273449d5..d3e0a3c5fe 100644
--- a/satpy/tests/test_modifiers.py
+++ b/satpy/tests/test_modifiers.py
@@ -16,92 +16,144 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Tests for modifiers in modifiers/__init__.py."""
-
import unittest
-from unittest import mock
from datetime import datetime
+from unittest import mock
import dask.array as da
import numpy as np
import pytest
import xarray as xr
-
-
-class TestSunZenithCorrector(unittest.TestCase):
+from pyresample.geometry import AreaDefinition, StackedAreaDefinition
+from pytest_lazyfixture import lazy_fixture
+
+
+def _sunz_area_def():
+ """Get fake area for testing sunz generation."""
+ area = AreaDefinition('test', 'test', 'test',
+ {'proj': 'merc'}, 2, 2,
+ (-2000, -2000, 2000, 2000))
+ return area
+
+
+def _sunz_bigger_area_def():
+ """Get area that is twice the size of 'sunz_area_def'."""
+ bigger_area = AreaDefinition('test', 'test', 'test',
+ {'proj': 'merc'}, 4, 4,
+ (-2000, -2000, 2000, 2000))
+ return bigger_area
+
+
+def _sunz_stacked_area_def():
+ """Get fake stacked area for testing sunz generation."""
+ area1 = AreaDefinition('test', 'test', 'test',
+ {'proj': 'merc'}, 2, 1,
+ (-2000, 0, 2000, 2000))
+ area2 = AreaDefinition('test', 'test', 'test',
+ {'proj': 'merc'}, 2, 1,
+ (-2000, -2000, 2000, 0))
+ return StackedAreaDefinition(area1, area2)
+
+
+def _shared_sunz_attrs(area_def):
+ attrs = {'area': area_def,
+ 'start_time': datetime(2018, 1, 1, 18),
+ 'modifiers': tuple(),
+ 'name': 'test_vis'}
+ return attrs
+
+
+def _get_ds1(attrs):
+ ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64),
+ attrs=attrs, dims=('y', 'x'),
+ coords={'y': [0, 1], 'x': [0, 1]})
+ return ds1
+
+
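+# The sunz fixtures below are session-scoped: the arrays are treated as read-only,
+# so a single instance can safely be shared by every test in this module.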
+@pytest.fixture(scope="session")
+def sunz_ds1():
+ """Generate fake dataset for sunz tests."""
+ attrs = _shared_sunz_attrs(_sunz_area_def())
+ return _get_ds1(attrs)
+
+
+@pytest.fixture(scope="session")
+def sunz_ds1_stacked():
+ """Generate fake dataset for sunz tests."""
+ attrs = _shared_sunz_attrs(_sunz_stacked_area_def())
+ return _get_ds1(attrs)
+
+
+@pytest.fixture(scope="session")
+def sunz_ds2():
+ """Generate larger fake dataset for sunz tests."""
+ attrs = _shared_sunz_attrs(_sunz_bigger_area_def())
+ ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64),
+ attrs=attrs, dims=('y', 'x'),
+ coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]})
+ return ds2
+
+
+@pytest.fixture(scope="session")
+def sunz_sza():
+ """Generate fake solar zenith angle data array for testing."""
+ sza = xr.DataArray(
+ np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]],
+ chunks=2))),
+ attrs={'area': _sunz_area_def()},
+ dims=('y', 'x'),
+ coords={'y': [0, 1], 'x': [0, 1]},
+ )
+ return sza
+
+
+class TestSunZenithCorrector:
"""Test case for the zenith corrector."""
- def setUp(self):
- """Create test data."""
- from pyresample.geometry import AreaDefinition
- area = AreaDefinition('test', 'test', 'test',
- {'proj': 'merc'}, 2, 2,
- (-2000, -2000, 2000, 2000))
- bigger_area = AreaDefinition('test', 'test', 'test',
- {'proj': 'merc'}, 4, 4,
- (-2000, -2000, 2000, 2000))
- attrs = {'area': area,
- 'start_time': datetime(2018, 1, 1, 18),
- 'modifiers': tuple(),
- 'name': 'test_vis'}
- ds1 = xr.DataArray(da.ones((2, 2), chunks=2, dtype=np.float64),
- attrs=attrs, dims=('y', 'x'),
- coords={'y': [0, 1], 'x': [0, 1]})
- self.ds1 = ds1
- ds2 = xr.DataArray(da.ones((4, 4), chunks=2, dtype=np.float64),
- attrs=attrs, dims=('y', 'x'),
- coords={'y': [0, 0.5, 1, 1.5], 'x': [0, 0.5, 1, 1.5]})
- ds2.attrs['area'] = bigger_area
- self.ds2 = ds2
- self.sza = xr.DataArray(
- np.rad2deg(np.arccos(da.from_array([[0.0149581333, 0.0146694376], [0.0150812684, 0.0147925727]],
- chunks=2))),
- attrs={'area': area},
- dims=('y', 'x'),
- coords={'y': [0, 1], 'x': [0, 1]},
- )
-
- def test_basic_default_not_provided(self):
+ def test_basic_default_not_provided(self, sunz_ds1):
"""Test default limits when SZA isn't provided."""
from satpy.modifiers.geometry import SunZenithCorrector
comp = SunZenithCorrector(name='sza_test', modifiers=tuple())
- res = comp((self.ds1,), test_attr='test')
+ res = comp((sunz_ds1,), test_attr='test')
np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
- self.assertIn('y', res.coords)
- self.assertIn('x', res.coords)
- ds1 = self.ds1.copy().drop_vars(('y', 'x'))
+ assert 'y' in res.coords
+ assert 'x' in res.coords
+ ds1 = sunz_ds1.copy().drop_vars(('y', 'x'))
res = comp((ds1,), test_attr='test')
np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
- self.assertNotIn('y', res.coords)
- self.assertNotIn('x', res.coords)
+ assert 'y' not in res.coords
+ assert 'x' not in res.coords
- def test_basic_lims_not_provided(self):
+ def test_basic_lims_not_provided(self, sunz_ds1):
"""Test custom limits when SZA isn't provided."""
from satpy.modifiers.geometry import SunZenithCorrector
comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90)
- res = comp((self.ds1,), test_attr='test')
+ res = comp((sunz_ds1,), test_attr='test')
np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]]))
- def test_basic_default_provided(self):
+ @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")])
+ def test_basic_default_provided(self, data_arr, sunz_sza):
"""Test default limits when SZA is provided."""
from satpy.modifiers.geometry import SunZenithCorrector
comp = SunZenithCorrector(name='sza_test', modifiers=tuple())
- res = comp((self.ds1, self.sza), test_attr='test')
+ res = comp((data_arr, sunz_sza), test_attr='test')
np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
- def test_basic_lims_provided(self):
+ @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")])
+ def test_basic_lims_provided(self, data_arr, sunz_sza):
"""Test custom limits when SZA is provided."""
from satpy.modifiers.geometry import SunZenithCorrector
comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90)
- res = comp((self.ds1, self.sza), test_attr='test')
+ res = comp((data_arr, sunz_sza), test_attr='test')
np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]]))
- def test_imcompatible_areas(self):
+ def test_incompatible_areas(self, sunz_ds2, sunz_sza):
"""Test sunz correction on incompatible areas."""
from satpy.composites import IncompatibleAreas
from satpy.modifiers.geometry import SunZenithCorrector
comp = SunZenithCorrector(name='sza_test', modifiers=tuple(), correction_limit=90)
with pytest.raises(IncompatibleAreas):
- comp((self.ds2, self.sza), test_attr='test')
+ comp((sunz_ds2, sunz_sza), test_attr='test')
class TestNIRReflectance(unittest.TestCase):
@@ -181,7 +233,9 @@ def test_no_sunz_no_co2(self, calculator, apply_modifier_info, sza):
info = {'modifiers': None}
res = comp([self.nir, self.ir_], optional_datasets=[], **info)
- self.get_lonlats.assert_called()
+ # due to copying of DataArrays, self.get_lonlats is not the same mock as the one that was called;
+ # we must use the area from the final result DataArray
+ res.attrs["area"].get_lonlats.assert_called()
sza.assert_called_with(self.start_time, self.lons, self.lats)
self.refl_from_tbs.assert_called_with(self.da_sunz, self.nir.data, self.ir_.data, tb_ir_co2=None)
assert np.allclose(res.data, self.refl * 100).compute()
@@ -341,94 +395,36 @@ def test_compositor(self, calculator, apply_modifier_info, sza):
class TestPSPAtmosphericalCorrection(unittest.TestCase):
"""Test the pyspectral-based atmospheric correction modifier."""
- def setUp(self):
- """Patch in-class imports."""
- self.orbital = mock.MagicMock()
- modules = {
- 'pyspectral.atm_correction_ir': mock.MagicMock(),
- 'pyorbital.orbital': self.orbital,
- }
- self.module_patcher = mock.patch.dict('sys.modules', modules)
- self.module_patcher.start()
-
- def tearDown(self):
- """Unpatch in-class imports."""
- self.module_patcher.stop()
-
- @mock.patch('satpy.modifiers.PSPAtmosphericalCorrection.apply_modifier_info')
- @mock.patch('satpy.modifiers.atmosphere.get_satpos')
- def test_call(self, get_satpos, *mocks):
+ def test_call(self):
"""Test atmospherical correction."""
- from satpy.modifiers import PSPAtmosphericalCorrection
-
- # Patch methods
- get_satpos.return_value = 'sat_lon', 'sat_lat', 12345678
- self.orbital.get_observer_look.return_value = 0, 0
- area = mock.MagicMock()
- area.get_lonlats.return_value = 'lons', 'lats'
- band = mock.MagicMock(attrs={'area': area,
- 'start_time': 'start_time',
- 'name': 'name',
- 'platform_name': 'platform',
- 'sensor': 'sensor'}, dims=['y'])
-
- # Perform atmospherical correction
- psp = PSPAtmosphericalCorrection(name='dummy')
- psp(projectables=[band])
-
- # Check arguments of get_orbserver_look() call, especially the altitude
- # unit conversion from meters to kilometers
- self.orbital.get_observer_look.assert_called_with(
- 'sat_lon', 'sat_lat', 12345.678, 'start_time', 'lons', 'lats', 0)
-
-
-class TestPSPRayleighReflectance(unittest.TestCase):
- """Test the pyspectral-based rayleigh correction modifier."""
-
- def setUp(self):
- """Patch in-class imports."""
- self.astronomy = mock.MagicMock()
- self.orbital = mock.MagicMock()
- modules = {
- 'pyorbital.astronomy': self.astronomy,
- 'pyorbital.orbital': self.orbital,
- }
- self.module_patcher = mock.patch.dict('sys.modules', modules)
- self.module_patcher.start()
-
- def tearDown(self):
- """Unpatch in-class imports."""
- self.module_patcher.stop()
+ from pyresample.geometry import SwathDefinition
- @mock.patch('satpy.modifiers.atmosphere.get_satpos')
- def test_get_angles(self, get_satpos):
- """Test sun and satellite angle calculation."""
- from satpy.modifiers import PSPRayleighReflectance
+ from satpy.modifiers import PSPAtmosphericalCorrection
# Patch methods
- get_satpos.return_value = 'sat_lon', 'sat_lat', 12345678
- self.orbital.get_observer_look.return_value = 0, 0
- self.astronomy.get_alt_az.return_value = 0, 0
- area = mock.MagicMock()
lons = np.zeros((5, 5))
lons[1, 1] = np.inf
lons = da.from_array(lons, chunks=5)
lats = np.zeros((5, 5))
lats[1, 1] = np.inf
lats = da.from_array(lats, chunks=5)
- area.get_lonlats.return_value = (lons, lats)
- vis = mock.MagicMock(attrs={'area': area,
- 'start_time': 'start_time'})
-
- # Compute angles
- psp = PSPRayleighReflectance(name='dummy')
- psp.get_angles(vis)
-
- # Check arguments of get_orbserver_look() call, especially the altitude
- # unit conversion from meters to kilometers
- self.orbital.get_observer_look.assert_called_once()
- args = self.orbital.get_observer_look.call_args[0]
- self.assertEqual(args[:4], ('sat_lon', 'sat_lat', 12345.678, 'start_time'))
- self.assertIsInstance(args[4], da.Array)
- self.assertIsInstance(args[5], da.Array)
- self.assertEqual(args[6], 0)
+ area = SwathDefinition(lons, lats)
+ stime = datetime(2020, 1, 1, 12, 0, 0)
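+ # Altitudes in orbital_parameters are given in metres; the modifier is expected
+ # to convert to kilometres for pyorbital, as the removed mock-based test checked.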
+ orb_params = {
+ "satellite_actual_altitude": 12345678,
+ "nadir_longitude": 0.0,
+ "nadir_latitude": 0.0,
+ }
+ band = xr.DataArray(da.zeros((5, 5)),
+ attrs={'area': area,
+ 'start_time': stime,
+ 'name': 'name',
+ 'platform_name': 'platform',
+ 'sensor': 'sensor',
+ 'orbital_parameters': orb_params},
+ dims=('y', 'x'))
+
+ # Perform atmospheric correction
+ psp = PSPAtmosphericalCorrection(name='dummy')
+ res = psp(projectables=[band])
+ res.compute()
diff --git a/satpy/tests/test_multiscene.py b/satpy/tests/test_multiscene.py
index a4d167db7a..1d14d43298 100644
--- a/satpy/tests/test_multiscene.py
+++ b/satpy/tests/test_multiscene.py
@@ -24,6 +24,10 @@
from datetime import datetime
from unittest import mock
+import pytest
+import xarray as xr
+
+from satpy import DataQuery
from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange
DEFAULT_SHAPE = (5, 10)
@@ -86,9 +90,9 @@ def _create_test_area(proj_str=None, shape=DEFAULT_SHAPE, extents=None):
def _create_test_dataset(name, shape=DEFAULT_SHAPE, area=None):
"""Create a test DataArray object."""
- import xarray as xr
import dask.array as da
import numpy as np
+ import xarray as xr
return xr.DataArray(
da.zeros(shape, dtype=np.float32, chunks=shape), dims=('y', 'x'),
@@ -186,12 +190,13 @@ def test_from_files(self):
scn_mock.assert_has_calls(calls)
scn_mock.reset_mock()
- mscn = MultiScene.from_files(
- input_files_abi + input_files_glm,
- reader=('abi_l1b', "glm_l2"),
- group_keys=["start_time"],
- ensure_all_readers=True,
- time_threshold=30)
+ with pytest.warns(DeprecationWarning):
+ mscn = MultiScene.from_files(
+ input_files_abi + input_files_glm,
+ reader=('abi_l1b', "glm_l2"),
+ group_keys=["start_time"],
+ ensure_all_readers=True,
+ time_threshold=30)
assert len(mscn.scenes) == 2
calls = [mock.call(
filenames={'abi_l1b': [in_file_abi], 'glm_l2': [in_file_glm]})
@@ -208,79 +213,80 @@ def test_from_files(self):
time_threshold=30)
assert len(mscn.scenes) == 12
- def test_group(self):
- """Test group."""
- from satpy import Scene, MultiScene
-
- ds1 = _create_test_dataset(name='ds1')
- ds2 = _create_test_dataset(name='ds2')
- ds3 = _create_test_dataset(name='ds3')
- ds4 = _create_test_dataset(name='ds4')
- scene1 = Scene()
- scene1['ds1'] = ds1
- scene1['ds2'] = ds2
- scene2 = Scene()
- scene2['ds3'] = ds3
- scene2['ds4'] = ds4
-
- multi_scene = MultiScene([scene1, scene2])
- groups = {make_dataid(name='odd', wavelength=(1, 2, 3)): ['ds1', 'ds3'],
- make_dataid(name='even', wavelength=(2, 3, 4)): ['ds2', 'ds4']}
- multi_scene.group(groups)
- self.assertSetEqual(multi_scene.shared_dataset_ids, set(groups.keys()))
+class TestMultiSceneGrouping:
+ """Test dataset grouping in MultiScene."""
- def test_add_group_aliases(self):
- """Test adding group aliases."""
- import xarray as xr
- import numpy as np
- import types
-
- from satpy.multiscene import add_group_aliases
+ @pytest.fixture
+ def scene1(self):
+ """Create first test scene."""
from satpy import Scene
+ scene = Scene()
+ dsid1 = make_dataid(
+ name="ds1",
+ resolution=123,
+ wavelength=(1, 2, 3),
+ polarization="H"
+ )
+ scene[dsid1] = _create_test_dataset(name='ds1')
+ dsid2 = make_dataid(
+ name="ds2",
+ resolution=456,
+ wavelength=(4, 5, 6),
+ polarization="V"
+ )
+ scene[dsid2] = _create_test_dataset(name='ds2')
+ return scene
- # Define test scenes
- ds_id1 = make_dataid(name='ds1', wavelength=(10.7, 10.8, 10.9))
- ds_id2 = make_dataid(name='ds2', wavelength=(1.9, 2.0, 2.1))
- ds_id3 = make_dataid(name='ds3', wavelength=(10.8, 10.9, 11.0))
- ds_id31 = make_dataid(name='ds31', polarization='H')
+ @pytest.fixture
+ def scene2(self):
+ """Create second test scene."""
+ from satpy import Scene
+ scene = Scene()
+ dsid1 = make_dataid(
+ name="ds3",
+ resolution=123.1,
+ wavelength=(1.1, 2.1, 3.1),
+ polarization="H"
+ )
+ scene[dsid1] = _create_test_dataset(name='ds3')
+ dsid2 = make_dataid(
+ name="ds4",
+ resolution=456.1,
+ wavelength=(4.1, 5.1, 6.1),
+ polarization="V"
+ )
+ scene[dsid2] = _create_test_dataset(name='ds4')
+ return scene
- scene1 = Scene()
- scene1[ds_id1] = xr.DataArray([1])
- scene2 = Scene()
- scene2[ds_id2] = xr.DataArray([2])
- scene3 = Scene()
- scene3[ds_id3] = xr.DataArray([3])
- scene3[ds_id31] = xr.DataArray([4])
- scenes = [scene1, scene2, scene3]
-
- # Define groups
- g1 = make_dataid(name='g1', wavelength=(10, 11, 12))
- g2 = make_dataid(name='g2', wavelength=(1, 2, 3), polarization='V')
- groups = {g1: ['ds1', 'ds3'], g2: ['ds2']}
-
- # Test adding aliases
- with_aliases = add_group_aliases(iter(scenes), groups)
- self.assertIsInstance(with_aliases, types.GeneratorType)
- with_aliases = list(with_aliases)
- self.assertSetEqual(set(with_aliases[0].keys()), {g1, ds_id1})
- self.assertSetEqual(set(with_aliases[1].keys()), {g2, ds_id2})
- self.assertSetEqual(set(with_aliases[2].keys()), {g1, ds_id3, ds_id31})
-
- np.testing.assert_array_equal(with_aliases[0]['g1'].values, [1])
- np.testing.assert_array_equal(with_aliases[0]['ds1'].values, [1])
- np.testing.assert_array_equal(with_aliases[1]['g2'].values, [2])
- np.testing.assert_array_equal(with_aliases[1]['ds2'].values, [2])
- np.testing.assert_array_equal(with_aliases[2]['g1'].values, [3])
- np.testing.assert_array_equal(with_aliases[2]['ds3'].values, [3])
- np.testing.assert_array_equal(with_aliases[2]['ds31'].values, [4])
-
- # Make sure that modifying the result doesn't modify the original
- self.assertNotIn(g1, scene1)
-
- # Adding an alias for multiple datasets in one scene should fail
- gen = add_group_aliases([scene3], {g1: ['ds3', 'ds31']})
- self.assertRaises(ValueError, list, gen)
+ @pytest.fixture
+ def multi_scene(self, scene1, scene2):
+ """Create small multi scene for testing."""
+ from satpy import MultiScene
+ return MultiScene([scene1, scene2])
+
+ @pytest.fixture
+ def groups(self):
+ """Get group definitions for the MultiScene."""
+ return {
+ DataQuery(name='odd'): ['ds1', 'ds3'],
+ DataQuery(name='even'): ['ds2', 'ds4']
+ }
+
+ def test_multi_scene_grouping(self, multi_scene, groups, scene1):
+ """Test grouping a MultiScene."""
+ multi_scene.group(groups)
+ shared_ids_exp = {make_dataid(name="odd"), make_dataid(name="even")}
+ assert multi_scene.shared_dataset_ids == shared_ids_exp
+ assert DataQuery(name='odd') not in scene1
+ xr.testing.assert_allclose(multi_scene.scenes[0]["ds1"], scene1["ds1"])
+
+ def test_fails_to_add_multiple_datasets_from_the_same_scene_to_a_group(self, multi_scene):
+ """Test that multiple datasets from the same scene in one group fails."""
+ groups = {DataQuery(name='mygroup'): ['ds1', 'ds2']}
+ multi_scene.group(groups)
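+ # Grouping is applied lazily, so the error only surfaces when the scenes are iterated.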
+ with pytest.raises(ValueError):
+ next(multi_scene.scenes)
class TestMultiSceneSave(unittest.TestCase):
@@ -433,8 +439,9 @@ def test_save_datasets_simple(self):
@mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image)
def test_save_datasets_distributed_delayed(self):
"""Test distributed save for writers returning delayed obejcts e.g. simple_image."""
- from satpy import MultiScene
from dask.delayed import Delayed
+
+ from satpy import MultiScene
area = _create_test_area()
scenes = _create_test_scenes(area=area)
@@ -467,8 +474,9 @@ def test_save_datasets_distributed_delayed(self):
@mock.patch('satpy.multiscene.get_enhanced_image', _fake_get_enhanced_image)
def test_save_datasets_distributed_source_target(self):
"""Test distributed save for writers returning sources and targets e.g. geotiff writer."""
- from satpy import MultiScene
import dask.array as da
+
+ from satpy import MultiScene
area = _create_test_area()
scenes = _create_test_scenes(area=area)
@@ -499,10 +507,11 @@ def test_save_datasets_distributed_source_target(self):
def test_crop(self):
"""Test the crop method."""
- from satpy import Scene, MultiScene
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
import numpy as np
+ from pyresample.geometry import AreaDefinition
+ from xarray import DataArray
+
+ from satpy import MultiScene, Scene
scene1 = Scene()
area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927,
5570248.477339745)
@@ -548,9 +557,10 @@ class TestBlendFuncs(unittest.TestCase):
def setUp(self):
"""Set up test data."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
area = AreaDefinition('test', 'test', 'test',
{'proj': 'geos', 'lon_0': -95.5, 'h': 35786023.0},
@@ -576,8 +586,9 @@ def test_stack(self):
def test_timeseries(self):
"""Test the 'timeseries' function."""
- from satpy.multiscene import timeseries
import xarray as xr
+
+ from satpy.multiscene import timeseries
res = timeseries([self.ds1, self.ds2])
res2 = timeseries([self.ds3, self.ds4])
self.assertIsInstance(res, xr.DataArray)
diff --git a/satpy/tests/test_node.py b/satpy/tests/test_node.py
index 2db124459a..8a41082266 100644
--- a/satpy/tests/test_node.py
+++ b/satpy/tests/test_node.py
@@ -19,6 +19,7 @@
import unittest
from unittest.mock import MagicMock
+
from satpy.node import CompositorNode
diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index 6e829284a5..3f76e63596 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -17,17 +17,21 @@
# satpy. If not, see .
"""Test classes and functions in the readers/__init__.py module."""
+import builtins
import os
+import sys
import unittest
from contextlib import suppress
from unittest import mock
import pytest
+
from satpy.dataset.data_dict import get_key
-from satpy.dataset.dataid import WavelengthRange, ModifierTuple, DataID
+from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange
# clear the config dir environment variable so it doesn't interfere
os.environ.pop("PPP_CONFIG_DIR", None)
+os.environ.pop("SATPY_CONFIG_PATH", None)
local_id_keys_config = {'name': {
'required': True,
@@ -53,6 +57,8 @@
},
}
+real_import = builtins.__import__
+
def make_dataid(**items):
"""Make a data id."""
@@ -224,6 +230,7 @@ def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2
+
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,))
self.fake_handler = self.p.start()
@@ -265,6 +272,7 @@ def test_bad_reader_name_with_filenames(self):
def test_filenames_as_path(self):
"""Test with filenames specified as pathlib.Path."""
from pathlib import Path
+
from satpy.readers import load_readers
ri = load_readers(filenames=[
Path('SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'),
@@ -356,8 +364,9 @@ def test_missing_requirements(self, *mocks):
def test_all_filtered(self):
"""Test behaviour if no file matches the filter parameters."""
- from satpy.readers import load_readers
import datetime
+
+ from satpy.readers import load_readers
filenames = {
'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'],
}
@@ -369,8 +378,9 @@ def test_all_filtered(self):
def test_all_filtered_multiple(self):
"""Test behaviour if no file matches the filter parameters."""
- from satpy.readers import load_readers
import datetime
+
+ from satpy.readers import load_readers
filenames = {
'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'],
'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20120561730408_e20120561741175_c20172631741218.nc'],
@@ -382,8 +392,9 @@ def test_all_filtered_multiple(self):
def test_almost_all_filtered(self):
"""Test behaviour if only one reader has datasets."""
- from satpy.readers import load_readers
import datetime
+
+ from satpy.readers import load_readers
filenames = {
'viirs_sdr': ['SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'],
'abi_l1b': ['OR_ABI-L1b-RadF-M3C01_G16_s20172631730408_e20172631741175_c20172631741218.nc'],
@@ -405,6 +416,7 @@ def setUp(self):
"""Wrap HDF5 file handler with our own fake handler."""
from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
from satpy.tests.reader_tests.test_viirs_sdr import FakeHDF5FileHandler2
+
# http://stackoverflow.com/questions/12219967/how-to-mock-a-base-class-with-python-mock-library
self.p = mock.patch.object(VIIRSSDRFileHandler, '__bases__', (FakeHDF5FileHandler2,))
self.fake_handler = self.p.start()
@@ -454,8 +466,9 @@ def test_reader_other_name(self):
def test_reader_name_matched_start_end_time(self):
"""Test with start and end time matching the filename."""
- from satpy.readers import find_files_and_readers
from datetime import datetime
+
+ from satpy.readers import find_files_and_readers
fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
# touch the file so it exists on disk
test_file = open(fn, 'w')
@@ -475,8 +488,9 @@ def test_reader_name_matched_start_time(self):
Start time in the middle of the file time should still match the file.
"""
- from satpy.readers import find_files_and_readers
from datetime import datetime
+
+ from satpy.readers import find_files_and_readers
fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
# touch the file so it exists on disk
test_file = open(fn, 'w')
@@ -494,8 +508,9 @@ def test_reader_name_matched_end_time(self):
End time in the middle of the file time should still match the file.
"""
- from satpy.readers import find_files_and_readers
from datetime import datetime
+
+ from satpy.readers import find_files_and_readers
fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
# touch the file so it exists on disk
test_file = open(fn, 'w')
@@ -509,8 +524,9 @@ def test_reader_name_matched_end_time(self):
def test_reader_name_unmatched_start_end_time(self):
"""Test with start and end time matching the filename."""
- from satpy.readers import find_files_and_readers
from datetime import datetime
+
+ from satpy.readers import find_files_and_readers
fn = 'SVI01_npp_d20120225_t1801245_e1802487_b01708_c20120226002130255476_noaa_ops.h5'
# touch the file so it exists on disk
test_file = open(fn, 'w')
@@ -569,6 +585,7 @@ def test_sensor(self):
def test_sensor_no_files(self):
"""Test that readers for the current sensor are loaded."""
from satpy.readers import find_files_and_readers
+
# we can't easily know how many readers satpy has that support
# 'viirs' so we just pass it and hope that this works
self.assertRaises(ValueError, find_files_and_readers, sensor='viirs')
@@ -578,26 +595,45 @@ def test_sensor_no_files(self):
def test_reader_load_failed(self):
"""Test that an exception is raised when a reader can't be loaded."""
- from satpy.readers import find_files_and_readers
import yaml
+
+ from satpy.readers import find_files_and_readers
+
# touch the file so it exists on disk
with mock.patch('yaml.load') as load:
load.side_effect = yaml.YAMLError("Import problems")
self.assertRaises(yaml.YAMLError, find_files_and_readers, reader='viirs_sdr')
+ def test_pending_old_reader_name_mapping(self):
+ """Test that requesting pending old reader names raises a warning."""
+ from satpy.readers import PENDING_OLD_READER_NAMES, get_valid_reader_names
+ if not PENDING_OLD_READER_NAMES:
+ return unittest.skip("Skipping pending deprecated reader tests because "
+ "no pending deprecated readers.")
+ test_reader = sorted(PENDING_OLD_READER_NAMES.keys())[0]
+ with self.assertWarns(FutureWarning):
+ valid_reader_names = get_valid_reader_names([test_reader])
+ self.assertEqual(valid_reader_names[0], PENDING_OLD_READER_NAMES[test_reader])
+
def test_old_reader_name_mapping(self):
"""Test that requesting old reader names raises a warning."""
- from satpy.readers import configs_for_reader, OLD_READER_NAMES
+ from satpy.readers import OLD_READER_NAMES, get_valid_reader_names
if not OLD_READER_NAMES:
return unittest.skip("Skipping deprecated reader tests because "
"no deprecated readers.")
test_reader = sorted(OLD_READER_NAMES.keys())[0]
- self.assertRaises(ValueError, list, configs_for_reader(test_reader))
+ with self.assertRaises(ValueError):
+ get_valid_reader_names([test_reader])
class TestYAMLFiles(unittest.TestCase):
"""Test and analyze the reader configuration files."""
+ def setUp(self):
+ """Set up monkeypatch."""
+ from _pytest.monkeypatch import MonkeyPatch
+ self.monkeypatch = MonkeyPatch()
+
def test_filename_matches_reader_name(self):
"""Test that every reader filename matches the name in the YAML."""
import yaml
@@ -635,6 +671,29 @@ def test_available_readers(self):
self.assertIn('name', reader_info)
self.assertEqual(reader_infos, sorted(reader_infos, key=lambda reader_info: reader_info['name']))
+ def test_available_readers_base_loader(self):
+ """Test the 'available_readers' function for yaml loader type BaseLoader."""
+ import yaml
+
+ from satpy import available_readers
+ from satpy._config import glob_config
+
+ def patched_import_error(name, globals=None, locals=None, fromlist=(), level=0):
+ if name in ('netcdf4', ):
+ raise ImportError(f"Mocked import error {name}")
+ return real_import(name, globals=globals, locals=locals, fromlist=fromlist, level=level)
+
+ self.monkeypatch.delitem(sys.modules, 'netcdf4', raising=False)
+ self.monkeypatch.setattr(builtins, '__import__', patched_import_error)
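+ # With yaml.BaseLoader the reader YAMLs are not fully resolved, so readers whose
+ # Python dependencies are unimportable (netcdf4 is mocked away above) are still listed.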
+
+ with pytest.raises(ImportError):
+ import netcdf4 # noqa: F401
+
+ reader_names = available_readers(yaml_loader=yaml.BaseLoader)
+ self.assertIn('abi_l1b', reader_names) # needs netcdf4
+ self.assertIn('viirs_l1b', reader_names)
+ self.assertEqual(len(reader_names), len(list(glob_config('readers/*.yaml'))))
+
class TestGroupFiles(unittest.TestCase):
"""Test the 'group_files' utility function."""
@@ -689,6 +748,7 @@ def setUp(self):
def test_no_reader(self):
"""Test that reader does not need to be provided."""
from satpy.readers import group_files
+
# without files it's going to be an empty result
assert group_files([]) == []
groups = group_files(self.g16_files)
@@ -702,8 +762,10 @@ def test_unknown_files(self):
def test_bad_reader(self):
"""Test that reader not existing causes an error."""
- from satpy.readers import group_files
import yaml
+
+ from satpy.readers import group_files
+
# touch the file so it exists on disk
with mock.patch('yaml.load') as load:
load.side_effect = yaml.YAMLError("Import problems")
@@ -834,6 +896,86 @@ def test_multi_readers(self):
group_keys=("start_time"),
time_threshold=10**9)
+ _filenames_abi_glm = [
+ "OR_ABI-L1b-RadF-M6C14_G16_s19000010000000_e19000010005000_c20403662359590.nc",
+ "OR_ABI-L1b-RadF-M6C14_G16_s19000010010000_e19000010015000_c20403662359590.nc",
+ "OR_ABI-L1b-RadF-M6C14_G16_s19000010020000_e19000010025000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010000000_e19000010001000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010001000_e19000010002000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010002000_e19000010003000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010003000_e19000010004000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010004000_e19000010005000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010005000_e19000010006000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010006000_e19000010007000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010007000_e19000010008000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010008000_e19000010009000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010009000_e19000010010000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010010000_e19000010011000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010011000_e19000010012000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010012000_e19000010013000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010013000_e19000010014000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010014000_e19000010015000_c20403662359590.nc",
+ "OR_GLM-L2-GLMF-M3_G16_s19000010015000_e19000010016000_c20403662359590.nc"]
+
+ def test_multi_readers_empty_groups_raises_filenotfounderror(self):
+ """Test behaviour on empty groups passing multiple readers.
+
+ Make sure it raises an exception, since there will be groups
+ containing GLM but no ABI files.
+ """
+ from satpy.readers import group_files
+ with pytest.raises(
+ FileNotFoundError, match="when grouping files, group at index 1 "
+ "had no files for readers: abi_l1b"):
+ group_files(
+ self._filenames_abi_glm,
+ reader=["abi_l1b", "glm_l2"],
+ group_keys=("start_time",),
+ time_threshold=35,
+ missing="raise")
+
+ def test_multi_readers_empty_groups_missing_skip(self):
+ """Verify empty groups are skipped.
+
+ Verify that groups lacking files for either reader are skipped, leaving only
+ two groups that are non-empty for both instruments.
+ """
+ from satpy.readers import group_files
+ groups = group_files(
+ self._filenames_abi_glm,
+ reader=["abi_l1b", "glm_l2"],
+ group_keys=("start_time",),
+ time_threshold=35,
+ missing="skip")
+ assert len(groups) == 2
+ for g in groups:
+ assert g["abi_l1b"]
+ assert g["glm_l2"]
+
+ def test_multi_readers_empty_groups_passed(self):
+ """Verify that all groups are there, resulting in some that are empty."""
+ from satpy.readers import group_files
+ groups = group_files(
+ self._filenames_abi_glm,
+ reader=["abi_l1b", "glm_l2"],
+ group_keys=("start_time",),
+ time_threshold=35,
+ missing="pass")
+ assert len(groups) == 17
+ assert not groups[1]["abi_l1b"] # should be empty
+ assert groups[1]["glm_l2"] # should not be empty
+
+ def test_multi_readers_invalid_parameter(self):
+ """Verify that invalid missing parameter raises ValueError."""
+ from satpy.readers import group_files
+ with pytest.raises(ValueError):
+ group_files(
+ self._filenames_abi_glm,
+ reader=["abi_l1b", "glm_l2"],
+ group_keys=("start_time",),
+ time_threshold=35,
+ missing="hopkin green frog")
+
def _generate_random_string():
import uuid
@@ -857,10 +999,11 @@ class TestFSFile(unittest.TestCase):
def setUp(self):
"""Set up the instance."""
- import fsspec
- from pathlib import Path
import tempfile
import zipfile
+ from pathlib import Path
+
+ import fsspec
self.random_string = _generate_random_string()
self.local_filename = os.path.join(tempfile.gettempdir(), self.random_string)
Path(self.local_filename).touch()
@@ -898,8 +1041,9 @@ def test_fsfile_with_regular_filename_and_fs_spec_abides_pathlike(self):
def test_fsfile_with_pathlike(self):
"""Test FSFile with path-like object."""
- from satpy.readers import FSFile
from pathlib import Path
+
+ from satpy.readers import FSFile
f = FSFile(Path(self.local_filename))
assert str(f) == os.fspath(f) == self.local_filename
@@ -925,24 +1069,27 @@ def test_open_local_fs_file(self):
def test_open_zip_fs_regular_filename(self):
"""Test opening a zipfs with a regular filename provided."""
- from satpy.readers import FSFile
from fsspec.implementations.zip import ZipFileSystem
+
+ from satpy.readers import FSFile
zip_fs = ZipFileSystem(self.zip_name)
file = FSFile(_posixify_path(self.local_filename2), zip_fs)
_assert_is_open_file_and_close(file.open())
def test_open_zip_fs_openfile(self):
"""Test opening a zipfs openfile."""
- from satpy.readers import FSFile
import fsspec
+
+ from satpy.readers import FSFile
open_file = fsspec.open("zip:/" + _posixify_path(self.local_filename2) + "::file://" + self.zip_name)
file = FSFile(open_file)
_assert_is_open_file_and_close(file.open())
def test_sorting_fsfiles(self):
"""Test sorting FSFiles."""
- from satpy.readers import FSFile
from fsspec.implementations.zip import ZipFileSystem
+
+ from satpy.readers import FSFile
zip_fs = ZipFileSystem(self.zip_name)
file1 = FSFile(self.local_filename2, zip_fs)
@@ -955,8 +1102,9 @@ def test_sorting_fsfiles(self):
def test_equality(self):
"""Test that FSFile compares equal when it should."""
- from satpy.readers import FSFile
from fsspec.implementations.zip import ZipFileSystem
+
+ from satpy.readers import FSFile
zip_fs = ZipFileSystem(self.zip_name)
assert FSFile(self.local_filename) == FSFile(self.local_filename)
assert (FSFile(self.local_filename, zip_fs) ==
@@ -967,10 +1115,11 @@ def test_equality(self):
def test_hash(self):
"""Test that FSFile hashing behaves sanely."""
- from satpy.readers import FSFile
- from fsspec.implementations.zip import ZipFileSystem
- from fsspec.implementations.local import LocalFileSystem
from fsspec.implementations.cached import CachingFileSystem
+ from fsspec.implementations.local import LocalFileSystem
+ from fsspec.implementations.zip import ZipFileSystem
+
+ from satpy.readers import FSFile
lfs = LocalFileSystem()
zfs = ZipFileSystem(self.zip_name)
diff --git a/satpy/tests/test_regressions.py b/satpy/tests/test_regressions.py
index 52c6d0856a..f85d9c37be 100644
--- a/satpy/tests/test_regressions.py
+++ b/satpy/tests/test_regressions.py
@@ -23,8 +23,8 @@
import dask.array as da
import numpy as np
from xarray import DataArray, Dataset
-from satpy.tests.utils import make_dataid
+from satpy.tests.utils import make_dataid
abi_file_list = ['/data/OR_ABI-L1b-RadF-M3C01_G16_s20180722030423_e20180722041189_c20180722041235-118900_0.nc',
'/data/OR_ABI-L1b-RadF-M3C02_G16_s20180722030423_e20180722041190_c20180722041228-120000_0.nc',
@@ -201,8 +201,9 @@ def test_1088(fake_open_dataset):
@patch('xarray.open_dataset')
def test_no_enums(fake_open_dataset):
"""Check that no enums are inserted in the resulting attrs."""
- from satpy import Scene
from enum import Enum
+
+ from satpy import Scene
fake_open_dataset.side_effect = generate_fake_abi_xr_dataset
scene = Scene(abi_file_list, reader='abi_l1b')
diff --git a/satpy/tests/test_resample.py b/satpy/tests/test_resample.py
index c377d3fbb2..42301cedf8 100644
--- a/satpy/tests/test_resample.py
+++ b/satpy/tests/test_resample.py
@@ -16,10 +16,10 @@
# satpy. If not, see .
"""Unittests for resamplers."""
-import unittest
-import tempfile
-import shutil
import os
+import shutil
+import tempfile
+import unittest
from unittest import mock
try:
@@ -45,10 +45,10 @@ def get_test_data(input_shape=(100, 50), output_shape=(200, 100), output_proj=No
* target_area_def: AreaDefinition to be used as a target for resampling
"""
- from xarray import DataArray
import dask.array as da
from pyresample.geometry import AreaDefinition, SwathDefinition
from pyresample.utils import proj4_str_to_dict
+ from xarray import DataArray
ds1 = DataArray(da.zeros(input_shape, chunks=85),
dims=input_dims,
attrs={'name': 'test_data_name', 'test': 'test'})
@@ -107,11 +107,12 @@ class TestHLResample(unittest.TestCase):
def test_type_preserve(self):
"""Check that the type of resampled datasets is preserved."""
- from satpy.resample import resample_dataset
- import xarray as xr
import dask.array as da
import numpy as np
+ import xarray as xr
from pyresample.geometry import SwathDefinition
+
+ from satpy.resample import resample_dataset
source_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x']),
xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)), dims=['y', 'x']))
dest_area = SwathDefinition(xr.DataArray(da.arange(4, chunks=5).reshape((2, 2)) + .0001, dims=['y', 'x']),
@@ -142,6 +143,7 @@ def test_kd_resampling(self, xr_resampler, create_filename, zarr_open,
xr_dset, cnc):
"""Test the kd resampler."""
import dask.array as da
+
from satpy.resample import KDTreeResampler
data, source_area, swath_data, source_swath, target_area = get_test_data()
mock_dset = mock.MagicMock()
@@ -263,6 +265,7 @@ def test_2d_ewa(self, get_lonlats, ll2cr, fornav):
"""Test EWA with a 2D dataset."""
import numpy as np
import xarray as xr
+
from satpy.resample import resample_dataset
ll2cr.return_value = (100,
np.zeros((10, 10), dtype=np.float32),
@@ -311,6 +314,7 @@ def test_3d_ewa(self, get_lonlats, ll2cr, fornav):
"""Test EWA with a 3D dataset."""
import numpy as np
import xarray as xr
+
from satpy.resample import resample_dataset
_, _, swath_data, source_swath, target_area = get_test_data(
input_shape=(3, 200, 100), input_dims=('bands', 'y', 'x'))
@@ -362,32 +366,34 @@ class TestNativeResampler(unittest.TestCase):
def test_expand_reduce(self):
"""Test class method 'expand_reduce' basics."""
- from satpy.resample import NativeResampler
- import numpy as np
import dask.array as da
+ import numpy as np
+
+ from satpy.resample import NativeResampler
d_arr = da.zeros((6, 20), chunks=4)
- new_data = NativeResampler.expand_reduce(d_arr, {0: 2., 1: 2.})
+ new_data = NativeResampler._expand_reduce(d_arr, {0: 2., 1: 2.})
self.assertEqual(new_data.shape, (12, 40))
- new_data = NativeResampler.expand_reduce(d_arr, {0: .5, 1: .5})
+ new_data = NativeResampler._expand_reduce(d_arr, {0: .5, 1: .5})
self.assertEqual(new_data.shape, (3, 10))
- self.assertRaises(ValueError, NativeResampler.expand_reduce,
+ self.assertRaises(ValueError, NativeResampler._expand_reduce,
d_arr, {0: 1. / 3, 1: 1.})
- new_data = NativeResampler.expand_reduce(d_arr, {0: 1., 1: 1.})
+ new_data = NativeResampler._expand_reduce(d_arr, {0: 1., 1: 1.})
self.assertEqual(new_data.shape, (6, 20))
self.assertIs(new_data, d_arr)
- self.assertRaises(ValueError, NativeResampler.expand_reduce,
+ self.assertRaises(ValueError, NativeResampler._expand_reduce,
d_arr, {0: 0.333323423, 1: 1.})
- self.assertRaises(ValueError, NativeResampler.expand_reduce,
+ self.assertRaises(ValueError, NativeResampler._expand_reduce,
d_arr, {0: 1.333323423, 1: 1.})
n_arr = np.zeros((6, 20))
- new_data = NativeResampler.expand_reduce(n_arr, {0: 2., 1: 1.0})
+ new_data = NativeResampler._expand_reduce(n_arr, {0: 2., 1: 1.0})
self.assertTrue(np.all(new_data.compute()[::2, :] == n_arr))
def test_expand_dims(self):
"""Test expanding native resampling with 2D data."""
- from satpy.resample import NativeResampler
import numpy as np
+
+ from satpy.resample import NativeResampler
ds1, source_area, _, _, target_area = get_test_data()
# source geo def doesn't actually matter
resampler = NativeResampler(source_area, target_area)
@@ -406,8 +412,9 @@ def test_expand_dims(self):
def test_expand_dims_3d(self):
"""Test expanding native resampling with 3D data."""
- from satpy.resample import NativeResampler
import numpy as np
+
+ from satpy.resample import NativeResampler
ds1, source_area, _, _, target_area = get_test_data(
input_shape=(3, 100, 50), input_dims=('bands', 'y', 'x'))
# source geo def doesn't actually matter
@@ -430,8 +437,9 @@ def test_expand_dims_3d(self):
def test_expand_without_dims(self):
"""Test expanding native resampling with no dimensions specified."""
- from satpy.resample import NativeResampler
import numpy as np
+
+ from satpy.resample import NativeResampler
ds1, source_area, _, _, target_area = get_test_data(input_dims=None)
# source geo def doesn't actually matter
resampler = NativeResampler(source_area, target_area)
@@ -465,6 +473,7 @@ def test_bil_resampling(self, xr_resampler, create_filename,
"""Test the bilinear resampler."""
import dask.array as da
import xarray as xr
+
from satpy.resample import BilinearResampler
data, source_area, swath_data, source_swath, target_area = get_test_data()
@@ -564,10 +573,11 @@ class TestCoordinateHelpers(unittest.TestCase):
def test_area_def_coordinates(self):
"""Test coordinates being added with an AreaDefinition."""
- import numpy as np
import dask.array as da
+ import numpy as np
import xarray as xr
from pyresample.geometry import AreaDefinition
+
from satpy.resample import add_crs_xy_coords
area_def = AreaDefinition(
'test', 'test', 'test', {'proj': 'lcc', 'lat_1': 25, 'lat_0': 25},
@@ -639,6 +649,7 @@ def test_swath_def_coordinates(self):
import dask.array as da
import xarray as xr
from pyresample.geometry import SwathDefinition
+
from satpy.resample import add_crs_xy_coords
lons_data = da.random.random((200, 100), chunks=50)
lats_data = da.random.random((200, 100), chunks=50)
@@ -713,6 +724,7 @@ def _compute_mocked_bucket_avg(self, data, return_data=None, **kwargs):
def test_compute(self):
"""Test bucket resampler computation."""
import dask.array as da
+
# 1D data
data = da.ones((5,))
res = self._compute_mocked_bucket_avg(data, fill_value=2)
@@ -784,8 +796,8 @@ def test_compute_and_not_use_skipna_handling(self):
@mock.patch('pyresample.bucket.BucketResampler')
def test_resample(self, pyresample_bucket):
"""Test bucket resamplers resample method."""
- import xarray as xr
import dask.array as da
+ import xarray as xr
self.bucket.resampler = mock.MagicMock()
self.bucket.precompute = mock.MagicMock()
self.bucket.compute = mock.MagicMock()
@@ -850,6 +862,7 @@ def _compute_mocked_bucket_sum(self, data, return_data=None, **kwargs):
def test_compute(self):
"""Test sum bucket resampler computation."""
import dask.array as da
+
# 1D data
data = da.ones((5,))
res = self._compute_mocked_bucket_sum(data)
@@ -937,6 +950,7 @@ def _compute_mocked_bucket_count(self, data, return_data=None, **kwargs):
def test_compute(self):
"""Test count bucket resampler computation."""
import dask.array as da
+
# 1D data
data = da.ones((5,))
res = self._compute_mocked_bucket_count(data)
@@ -996,9 +1010,9 @@ def test_compute(self):
@mock.patch('pyresample.bucket.BucketResampler')
def test_resample(self, pyresample_bucket):
"""Test fraction bucket resamplers resample method."""
- import xarray as xr
import dask.array as da
import numpy as np
+ import xarray as xr
self.bucket.resampler = mock.MagicMock()
self.bucket.precompute = mock.MagicMock()
diff --git a/satpy/tests/test_scene.py b/satpy/tests/test_scene.py
index ea74b1e6e8..178064782b 100644
--- a/satpy/tests/test_scene.py
+++ b/satpy/tests/test_scene.py
@@ -17,22 +17,30 @@
# satpy. If not, see .
"""Unit tests for scene.py."""
+import math
import os
+import random
+import string
import unittest
+from datetime import datetime
from unittest import mock
-import string
-import random
-
-import satpy
-from satpy import Scene
-from satpy.tests.utils import (default_id_keys_config, make_cid, make_dataid,
- make_dsq, spy_decorator,
- FAKE_FILEHANDLER_START, FAKE_FILEHANDLER_END)
+import dask.array as da
import numpy as np
-import xarray as xr
import pytest
+import xarray as xr
+import satpy
+from satpy import Scene
+from satpy.tests.utils import (
+ FAKE_FILEHANDLER_END,
+ FAKE_FILEHANDLER_START,
+ default_id_keys_config,
+ make_cid,
+ make_dataid,
+ make_dsq,
+ spy_decorator,
+)
TEST_ETC_DIR = os.path.join(os.path.dirname(__file__), 'etc')
@@ -91,34 +99,64 @@ def test_init_preserve_reader_kwargs(self):
assert scene.start_time == FAKE_FILEHANDLER_START
assert scene.end_time == FAKE_FILEHANDLER_END
+ @pytest.mark.parametrize(
+ ("reader", "filenames", "exp_sensors"),
+ [
+ ("fake1", ["fake1_1.txt"], {"fake_sensor"}),
+ (None, {"fake1": ["fake1_1.txt"], "fake2_1ds": ["fake2_1ds_1.txt"]}, {"fake_sensor", "fake_sensor2"}),
+ ]
+ )
+ def test_sensor_names_readers(self, reader, filenames, exp_sensors):
+ """Test that Scene sensor_names handles different cases properly."""
+ scene = Scene(reader=reader, filenames=filenames)
+ assert scene.start_time == FAKE_FILEHANDLER_START
+ assert scene.end_time == FAKE_FILEHANDLER_END
+ assert scene.sensor_names == exp_sensors
+
+ @pytest.mark.parametrize(
+ ("include_reader", "added_sensor", "exp_sensors"),
+ [
+ (False, "my_sensor", {"my_sensor"}),
+ (True, "my_sensor", {"my_sensor", "fake_sensor"}),
+ (False, {"my_sensor"}, {"my_sensor"}),
+ (True, {"my_sensor"}, {"my_sensor", "fake_sensor"}),
+ (False, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2"}),
+ (True, {"my_sensor1", "my_sensor2"}, {"my_sensor1", "my_sensor2", "fake_sensor"}),
+ ]
+ )
+ def test_sensor_names_added_datasets(self, include_reader, added_sensor, exp_sensors):
+ """Test that Scene sensor_names handles contained sensors properly."""
+ if include_reader:
+ scene = Scene(reader="fake1", filenames=["fake1_1.txt"])
+ else:
+ scene = Scene()
+
+ scene["my_ds"] = xr.DataArray([], attrs={"sensor": added_sensor})
+ assert scene.sensor_names == exp_sensors
+
def test_init_alone(self):
"""Test simple initialization."""
- from satpy.scene import Scene
scn = Scene()
assert not scn._readers, 'Empty scene should not load any readers'
def test_init_no_files(self):
"""Test that providing an empty list of filenames fails."""
- from satpy.scene import Scene
pytest.raises(ValueError, Scene, reader='viirs_sdr', filenames=[])
def test_create_reader_instances_with_filenames(self):
"""Test creating a reader providing filenames."""
filenames = ["bla", "foo", "bar"]
reader_name = None
- with mock.patch('satpy.scene.Scene._compute_metadata_from_readers') as md:
- md.return_value = {'sensor': {'sensor'}}
- with mock.patch('satpy.scene.load_readers') as findermock:
- Scene(filenames=filenames)
- findermock.assert_called_once_with(
- filenames=filenames,
- reader=reader_name,
- reader_kwargs=None,
- )
+ with mock.patch('satpy.scene.load_readers') as findermock:
+ Scene(filenames=filenames)
+ findermock.assert_called_once_with(
+ filenames=filenames,
+ reader=reader_name,
+ reader_kwargs=None,
+ )
def test_init_with_empty_filenames(self):
"""Test initialization with empty filename list."""
- from satpy.scene import Scene
filenames = []
Scene(filenames=filenames)
@@ -131,7 +169,6 @@ def test_init_with_fsfile(self):
# TypeError within that method if passed an FSFile instance.
# Instead rely on the ValueError that satpy raises if no readers
# are found.
-
# Choose random filename that doesn't exist. Not using tempfile here,
# because tempfile creates files and we don't want that here.
fsf = FSFile("".join(random.choices(string.printable, k=50)))
@@ -172,7 +209,6 @@ def test_init_with_fsfile(self):
def test_create_reader_instances_with_reader(self):
"""Test createring a reader instance providing the reader name."""
- from satpy.scene import Scene
reader = "foo"
filenames = ["1", "2", "3"]
with mock.patch('satpy.scene.load_readers') as findermock:
@@ -222,25 +258,21 @@ def test_create_multiple_reader_different_kwargs(self):
def test_iter(self):
"""Test iteration over the scene."""
- from satpy import Scene
- from xarray import DataArray
scene = Scene()
- scene["1"] = DataArray(np.arange(5))
- scene["2"] = DataArray(np.arange(5))
- scene["3"] = DataArray(np.arange(5))
+ scene["1"] = xr.DataArray(np.arange(5))
+ scene["2"] = xr.DataArray(np.arange(5))
+ scene["3"] = xr.DataArray(np.arange(5))
for x in scene:
- assert isinstance(x, DataArray)
+ assert isinstance(x, xr.DataArray)
def test_iter_by_area_swath(self):
"""Test iterating by area on a swath."""
- from satpy import Scene
- from xarray import DataArray
from pyresample.geometry import SwathDefinition
scene = Scene()
sd = SwathDefinition(lons=np.arange(5), lats=np.arange(5))
- scene["1"] = DataArray(np.arange(5), attrs={'area': sd})
- scene["2"] = DataArray(np.arange(5), attrs={'area': sd})
- scene["3"] = DataArray(np.arange(5))
+ scene["1"] = xr.DataArray(np.arange(5), attrs={'area': sd})
+ scene["2"] = xr.DataArray(np.arange(5), attrs={'area': sd})
+ scene["3"] = xr.DataArray(np.arange(5))
for area_obj, ds_list in scene.iter_by_area():
ds_list_names = set(ds['name'] for ds in ds_list)
if area_obj is sd:
@@ -251,15 +283,12 @@ def test_iter_by_area_swath(self):
def test_bad_setitem(self):
"""Test setting an item wrongly."""
- from satpy import Scene
scene = Scene()
pytest.raises(ValueError, scene.__setitem__, '1', np.arange(5))
def test_setitem(self):
"""Test setting an item."""
- from satpy import Scene
from satpy.tests.utils import make_dataid
- import xarray as xr
scene = Scene()
scene["1"] = ds1 = xr.DataArray(np.arange(5))
expected_id = make_cid(**ds1.attrs)
@@ -279,12 +308,10 @@ def test_setitem(self):
def test_getitem(self):
"""Test __getitem__ with names only."""
- from satpy import Scene
- from xarray import DataArray
scene = Scene()
- scene["1"] = ds1 = DataArray(np.arange(5))
- scene["2"] = ds2 = DataArray(np.arange(5))
- scene["3"] = ds3 = DataArray(np.arange(5))
+ scene["1"] = ds1 = xr.DataArray(np.arange(5))
+ scene["2"] = ds2 = xr.DataArray(np.arange(5))
+ scene["3"] = ds3 = xr.DataArray(np.arange(5))
assert scene['1'] is ds1
assert scene['2'] is ds2
assert scene['3'] is ds3
@@ -294,31 +321,28 @@ def test_getitem(self):
def test_getitem_modifiers(self):
"""Test __getitem__ with names and modifiers."""
- from satpy import Scene
- from xarray import DataArray
-
# Return least modified item
scene = Scene()
- scene['1'] = ds1_m0 = DataArray(np.arange(5))
+ scene['1'] = ds1_m0 = xr.DataArray(np.arange(5))
scene[make_dataid(name='1', modifiers=('mod1',))
- ] = ds1_m1 = DataArray(np.arange(5))
+ ] = xr.DataArray(np.arange(5))
assert scene['1'] is ds1_m0
assert len(list(scene.keys())) == 2
scene = Scene()
- scene['1'] = ds1_m0 = DataArray(np.arange(5))
+ scene['1'] = ds1_m0 = xr.DataArray(np.arange(5))
scene[make_dataid(name='1', modifiers=('mod1',))
- ] = ds1_m1 = DataArray(np.arange(5))
+ ] = xr.DataArray(np.arange(5))
scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))
- ] = ds1_m2 = DataArray(np.arange(5))
+ ] = xr.DataArray(np.arange(5))
assert scene['1'] is ds1_m0
assert len(list(scene.keys())) == 3
scene = Scene()
scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))
- ] = ds1_m2 = DataArray(np.arange(5))
+ ] = ds1_m2 = xr.DataArray(np.arange(5))
scene[make_dataid(name='1', modifiers=('mod1',))
- ] = ds1_m1 = DataArray(np.arange(5))
+ ] = ds1_m1 = xr.DataArray(np.arange(5))
assert scene['1'] is ds1_m1
assert scene[make_dataid(name='1', modifiers=('mod1', 'mod2'))] is ds1_m2
pytest.raises(KeyError, scene.__getitem__,
@@ -327,8 +351,6 @@ def test_getitem_modifiers(self):
def test_getitem_slices(self):
"""Test __getitem__ with slices."""
- from satpy import Scene
- from xarray import DataArray
from pyresample.geometry import AreaDefinition, SwathDefinition
from pyresample.utils import proj4_str_to_dict
scene1 = Scene()
@@ -347,25 +369,25 @@ def test_getitem_slices(self):
)
swath_def = SwathDefinition(lons=np.zeros((5, 10)),
lats=np.zeros((5, 10)))
- scene1["1"] = scene2["1"] = DataArray(np.zeros((5, 10)))
- scene1["2"] = scene2["2"] = DataArray(np.zeros((5, 10)),
- dims=('y', 'x'))
- scene1["3"] = DataArray(np.zeros((5, 10)), dims=('y', 'x'),
- attrs={'area': area_def})
- anc_vars = [DataArray(np.ones((5, 10)), attrs={'name': 'anc_var',
- 'area': area_def})]
+ scene1["1"] = scene2["1"] = xr.DataArray(np.zeros((5, 10)))
+ scene1["2"] = scene2["2"] = xr.DataArray(np.zeros((5, 10)),
+ dims=('y', 'x'))
+ scene1["3"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'),
+ attrs={'area': area_def})
+ anc_vars = [xr.DataArray(np.ones((5, 10)),
+ attrs={'name': 'anc_var', 'area': area_def})]
attrs = {'ancillary_variables': anc_vars, 'area': area_def}
- scene1["3a"] = DataArray(np.zeros((5, 10)),
- dims=('y', 'x'),
- attrs=attrs)
- scene2["4"] = DataArray(np.zeros((5, 10)), dims=('y', 'x'),
- attrs={'area': swath_def})
- anc_vars = [DataArray(np.ones((5, 10)), attrs={'name': 'anc_var',
- 'area': swath_def})]
+ scene1["3a"] = xr.DataArray(np.zeros((5, 10)),
+ dims=('y', 'x'),
+ attrs=attrs)
+ scene2["4"] = xr.DataArray(np.zeros((5, 10)), dims=('y', 'x'),
+ attrs={'area': swath_def})
+ anc_vars = [xr.DataArray(np.ones((5, 10)),
+ attrs={'name': 'anc_var', 'area': swath_def})]
attrs = {'ancillary_variables': anc_vars, 'area': swath_def}
- scene2["4a"] = DataArray(np.zeros((5, 10)),
- dims=('y', 'x'),
- attrs=attrs)
+ scene2["4a"] = xr.DataArray(np.zeros((5, 10)),
+ dims=('y', 'x'),
+ attrs=attrs)
new_scn1 = scene1[2:5, 2:8]
new_scn2 = scene2[2:5, 2:8]
for new_scn in [new_scn1, new_scn2]:
@@ -389,8 +411,6 @@ def test_getitem_slices(self):
def test_crop(self):
"""Test the crop method."""
- from satpy import Scene
- from xarray import DataArray
from pyresample.geometry import AreaDefinition
scene1 = Scene()
area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927,
@@ -417,12 +437,12 @@ def test_crop(self):
y_size // 2,
area_extent,
)
- scene1["1"] = DataArray(np.zeros((y_size, x_size)))
- scene1["2"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'))
- scene1["3"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'),
- attrs={'area': area_def})
- scene1["4"] = DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'),
- attrs={'area': area_def2})
+ scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)))
+ scene1["2"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'))
+ scene1["3"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'),
+ attrs={'area': area_def})
+ scene1["4"] = xr.DataArray(np.zeros((y_size // 2, x_size // 2)), dims=('y', 'x'),
+ attrs={'area': area_def2})
# by area
crop_area = AreaDefinition(
@@ -466,8 +486,6 @@ def test_crop(self):
def test_crop_epsg_crs(self):
"""Test the crop method when source area uses an EPSG code."""
- from satpy import Scene
- from xarray import DataArray
from pyresample.geometry import AreaDefinition
scene1 = Scene()
@@ -481,8 +499,8 @@ def test_crop_epsg_crs(self):
y_size,
area_extent,
)
- scene1["1"] = DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'),
- attrs={'area': area_def})
+ scene1["1"] = xr.DataArray(np.zeros((y_size, x_size)), dims=('y', 'x'),
+ attrs={'area': area_def})
# by x/y bbox
new_scn1 = scene1.crop(xy_bbox=(719695.7781587119, 5427887.407618969, 725068.1609052602, 5433708.364368956))
assert '1' in new_scn1
@@ -490,8 +508,6 @@ def test_crop_epsg_crs(self):
def test_crop_rgb(self):
"""Test the crop method on multi-dimensional data."""
- from satpy import Scene
- from xarray import DataArray
from pyresample.geometry import AreaDefinition
scene1 = Scene()
area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927,
@@ -518,9 +534,12 @@ def test_crop_rgb(self):
y_size // 2,
area_extent,
)
- scene1["1"] = DataArray(np.zeros((3, y_size, x_size)), dims=('bands', 'y', 'x'), attrs={'area': area_def})
- scene1["2"] = DataArray(np.zeros((y_size // 2, 3, x_size // 2)), dims=('y', 'bands', 'x'),
- attrs={'area': area_def2})
+ scene1["1"] = xr.DataArray(np.zeros((3, y_size, x_size)),
+ dims=('bands', 'y', 'x'),
+ attrs={'area': area_def})
+ scene1["2"] = xr.DataArray(np.zeros((y_size // 2, 3, x_size // 2)),
+ dims=('y', 'bands', 'x'),
+ attrs={'area': area_def2})
# by lon/lat bbox
new_scn1 = scene1.crop(ll_bbox=(-20., -5., 0, 0))
@@ -533,21 +552,20 @@ def test_crop_rgb(self):
def test_contains(self):
"""Test contains."""
- from satpy import Scene
- from xarray import DataArray
scene = Scene()
- scene["1"] = DataArray(np.arange(5), attrs={'wavelength': (0.1, 0.2, 0.3),
- '_satpy_id_keys': default_id_keys_config})
+ scene["1"] = xr.DataArray(np.arange(5),
+ attrs={'wavelength': (0.1, 0.2, 0.3),
+ '_satpy_id_keys': default_id_keys_config})
assert '1' in scene
assert 0.15 in scene
assert '2' not in scene
assert 0.31 not in scene
scene = Scene()
- scene['blueberry'] = DataArray(np.arange(5))
- scene['blackberry'] = DataArray(np.arange(5))
- scene['strawberry'] = DataArray(np.arange(5))
- scene['raspberry'] = DataArray(np.arange(5))
+ scene['blueberry'] = xr.DataArray(np.arange(5))
+ scene['blackberry'] = xr.DataArray(np.arange(5))
+ scene['strawberry'] = xr.DataArray(np.arange(5))
+ scene['raspberry'] = xr.DataArray(np.arange(5))
# deepcode ignore replace~keys~list~compare: This is on purpose
assert make_cid(name='blueberry') in scene.keys()
assert make_cid(name='blueberry') in scene
@@ -556,15 +574,16 @@ def test_contains(self):
def test_delitem(self):
"""Test deleting an item."""
- from satpy import Scene
- from xarray import DataArray
scene = Scene()
- scene["1"] = DataArray(np.arange(5), attrs={'wavelength': (0.1, 0.2, 0.3),
- '_satpy_id_keys': default_id_keys_config})
- scene["2"] = DataArray(np.arange(5), attrs={'wavelength': (0.4, 0.5, 0.6),
- '_satpy_id_keys': default_id_keys_config})
- scene["3"] = DataArray(np.arange(5), attrs={'wavelength': (0.7, 0.8, 0.9),
- '_satpy_id_keys': default_id_keys_config})
+ scene["1"] = xr.DataArray(np.arange(5),
+ attrs={'wavelength': (0.1, 0.2, 0.3),
+ '_satpy_id_keys': default_id_keys_config})
+ scene["2"] = xr.DataArray(np.arange(5),
+ attrs={'wavelength': (0.4, 0.5, 0.6),
+ '_satpy_id_keys': default_id_keys_config})
+ scene["3"] = xr.DataArray(np.arange(5),
+ attrs={'wavelength': (0.7, 0.8, 0.9),
+ '_satpy_id_keys': default_id_keys_config})
del scene['1']
del scene['3']
del scene[0.45]
@@ -574,7 +593,6 @@ def test_delitem(self):
def test_all_datasets_no_readers(self):
"""Test all datasets with no reader."""
- from satpy import Scene
scene = Scene()
pytest.raises(KeyError, scene.all_dataset_ids, reader_name='fake')
id_list = scene.all_dataset_ids()
@@ -585,7 +603,6 @@ def test_all_datasets_no_readers(self):
def test_all_dataset_names_no_readers(self):
"""Test all dataset names with no reader."""
- from satpy import Scene
scene = Scene()
pytest.raises(KeyError, scene.all_dataset_names, reader_name='fake')
name_list = scene.all_dataset_names()
@@ -596,7 +613,6 @@ def test_all_dataset_names_no_readers(self):
def test_available_dataset_no_readers(self):
"""Test the available datasets without a reader."""
- from satpy import Scene
scene = Scene()
pytest.raises(
KeyError, scene.available_dataset_ids, reader_name='fake')
@@ -608,7 +624,6 @@ def test_available_dataset_no_readers(self):
def test_available_dataset_names_no_readers(self):
"""Test the available dataset names without a reader."""
- from satpy import Scene
scene = Scene()
pytest.raises(
KeyError, scene.available_dataset_names, reader_name='fake')
@@ -618,69 +633,183 @@ def test_available_dataset_names_no_readers(self):
name_list = scene.available_dataset_names(composites=True)
assert name_list == []
+ def test_storage_options_from_reader_kwargs_no_options(self):
+ """Test getting storage options from reader kwargs.
-class TestFinestCoarsestArea:
- """Test the Scene logic for finding the finest and coarsest area."""
+ Case where there are no options given.
+ """
+ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"]
+ with mock.patch('satpy.scene.load_readers'):
+ with mock.patch('fsspec.open_files') as open_files:
+ Scene(filenames=filenames)
+ open_files.assert_called_once_with(filenames)
- def setup_method(self):
- """Set common variables."""
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- self.scene = Scene()
- self.scene["1"] = DataArray(np.arange(10).reshape((2, 5)),
- attrs={'wavelength': (0.1, 0.2, 0.3)})
- self.ds1 = self.scene["1"]
+ def test_storage_options_from_reader_kwargs_single_dict_no_options(self):
+ """Test getting storage options from reader kwargs for remote files.
- self.scene["2"] = DataArray(np.arange(40).reshape((4, 10)),
- attrs={'wavelength': (0.4, 0.5, 0.6)})
- self.ds2 = self.scene["2"]
+ Case where a single dict is given for all readers without storage options.
+ """
+ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"]
+ reader_kwargs = {'reader_opt': 'foo'}
+ with mock.patch('satpy.scene.load_readers'):
+ with mock.patch('fsspec.open_files') as open_files:
+ Scene(filenames=filenames, reader_kwargs=reader_kwargs)
+ open_files.assert_called_once_with(filenames)
- self.scene["3"] = DataArray(np.arange(40).reshape((4, 10)),
- attrs={'wavelength': (0.7, 0.8, 0.9)})
- self.ds3 = self.scene["3"]
+ def test_storage_options_from_reader_kwargs_single_dict(self):
+ """Test getting storage options from reader kwargs.
- proj_dict = proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 '
- '+lon_0=-95. +lat_0=25 +lat_1=25 '
- '+units=m +no_defs')
- self.area_def1 = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj_dict,
- 100,
- 200,
- (-1000., -1500., 1000., 1500.),
- )
- self.area_def2 = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj_dict,
- 200,
- 400,
- (-1000., -1500., 1000., 1500.),
- )
+ Case where a single dict is given for all readers with some common storage options.
+ """
+ filenames = ["s3://data-bucket/file1", "s3://data-bucket/file2", "s3://data-bucket/file3"]
+ reader_kwargs = {'reader_opt': 'foo'}
+ expected_reader_kwargs = reader_kwargs.copy()
+ storage_options = {'option1': '1'}
+ reader_kwargs['storage_options'] = storage_options
+ with mock.patch('satpy.scene.load_readers') as load_readers:
+ with mock.patch('fsspec.open_files') as open_files:
+ Scene(filenames=filenames, reader_kwargs=reader_kwargs)
+ call_ = load_readers.mock_calls[0]
+ assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs
+ open_files.assert_called_once_with(filenames, **storage_options)
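+        # A sketch of the behavior checked above (illustrative only, not
+        # Scene's actual implementation): 'storage_options' is assumed to be
+        # popped from reader_kwargs before they are forwarded, e.g.
+        #   {'reader_opt': 'foo', 'storage_options': {'option1': '1'}}
+        #   -> load_readers(..., reader_kwargs={'reader_opt': 'foo'})
+        #   -> fsspec.open_files(filenames, option1='1')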
+
+ def test_storage_options_from_reader_kwargs_per_reader(self):
+ """Test getting storage options from reader kwargs.
+
+        Case where each reader has its own storage options.
+ """
+ from copy import deepcopy
+
+ filenames = {
+ "reader1": ["s3://data-bucket/file1"],
+ "reader2": ["s3://data-bucket/file2"],
+ "reader3": ["s3://data-bucket/file3"],
+ }
+ storage_options_1 = {'option1': '1'}
+ storage_options_2 = {'option2': '2'}
+ storage_options_3 = {'option3': '3'}
+ reader_kwargs = {
+ "reader1": {'reader_opt_1': 'foo'},
+ "reader2": {'reader_opt_2': 'bar'},
+ "reader3": {'reader_opt_3': 'baz'},
+ }
+ expected_reader_kwargs = deepcopy(reader_kwargs)
+ reader_kwargs['reader1']['storage_options'] = storage_options_1
+ reader_kwargs['reader2']['storage_options'] = storage_options_2
+ reader_kwargs['reader3']['storage_options'] = storage_options_3
+
+ with mock.patch('satpy.scene.load_readers') as load_readers:
+ with mock.patch('fsspec.open_files') as open_files:
+ Scene(filenames=filenames, reader_kwargs=reader_kwargs)
+ call_ = load_readers.mock_calls[0]
+ assert call_.kwargs['reader_kwargs'] == expected_reader_kwargs
+ assert mock.call(filenames["reader1"], **storage_options_1) in open_files.mock_calls
+ assert mock.call(filenames["reader2"], **storage_options_2) in open_files.mock_calls
+ assert mock.call(filenames["reader3"], **storage_options_3) in open_files.mock_calls
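+        # Same split as above, applied per reader: each reader's own
+        # storage_options dict ends up in its own fsspec.open_files call.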
+
+
+def _create_coarsest_finest_data_array(shape, area_def, attrs=None):
+ data_arr = xr.DataArray(
+ da.arange(math.prod(shape)).reshape(shape),
+ attrs={
+ 'area': area_def,
+ })
+ if attrs:
+ data_arr.attrs.update(attrs)
+ return data_arr
+
+
+def _create_coarsest_finest_area_def(shape, extents):
+ from pyresample import AreaDefinition
+ proj_str = '+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs'
+ area_def = AreaDefinition(
+ 'test',
+ 'test',
+ 'test',
+ proj_str,
+ shape[1],
+ shape[0],
+ extents,
+ )
+ return area_def
+
+
+def _create_coarsest_finest_swath_def(shape, extents, name_suffix):
+ from pyresample import SwathDefinition
+ if len(shape) == 1:
+ lons_arr = da.linspace(extents[0], extents[2], shape[0], dtype=np.float32)
+ lats_arr = da.linspace(extents[1], extents[3], shape[0], dtype=np.float32)
+ else:
+ lons_arr = da.repeat(da.linspace(extents[0], extents[2], shape[1], dtype=np.float32)[None, :], shape[0], axis=0)
+ lats_arr = da.repeat(da.linspace(extents[1], extents[3], shape[0], dtype=np.float32)[:, None], shape[1], axis=1)
+ lons_data_arr = xr.DataArray(lons_arr, attrs={"name": f"longitude{name_suffix}"})
+    lats_data_arr = xr.DataArray(lats_arr, attrs={"name": f"latitude{name_suffix}"})
+ return SwathDefinition(lons_data_arr, lats_data_arr)
+
+
+class TestFinestCoarsestArea:
+ """Test the Scene logic for finding the finest and coarsest area."""
- def test_coarsest_finest_area_upright_area(self):
+ @pytest.mark.parametrize(
+ ("coarse_area", "fine_area"),
+ [
+ (_create_coarsest_finest_area_def((2, 5), (1000.0, 1500.0, -1000.0, -1500.0)),
+ _create_coarsest_finest_area_def((4, 10), (1000.0, 1500.0, -1000.0, -1500.0))),
+ (_create_coarsest_finest_area_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0)),
+ _create_coarsest_finest_area_def((4, 10), (-1000.0, -1500.0, 1000.0, 1500.0))),
+ (_create_coarsest_finest_swath_def((2, 5), (1000.0, 1500.0, -1000.0, -1500.0), "1"),
+ _create_coarsest_finest_swath_def((4, 10), (1000.0, 1500.0, -1000.0, -1500.0), "1")),
+ (_create_coarsest_finest_swath_def((5,), (1000.0, 1500.0, -1000.0, -1500.0), "1"),
+ _create_coarsest_finest_swath_def((10,), (1000.0, 1500.0, -1000.0, -1500.0), "1")),
+ ]
+ )
+ def test_coarsest_finest_area_different_shape(self, coarse_area, fine_area):
"""Test 'coarsest_area' and 'finest_area' methods for upright areas."""
- self.ds1.attrs['area'] = self.area_def1
- self.ds2.attrs['area'] = self.area_def2
- self.ds3.attrs['area'] = self.area_def2
- assert self.scene.coarsest_area() is self.area_def1
- assert self.scene.finest_area() is self.area_def2
- assert self.scene.coarsest_area(['2', '3']) is self.area_def2
-
- def test_coarsest_finest_area_flipped_area(self):
- """Test 'coarsest_area' and 'finest_area' methods for flipped areas with negative pixel sizes."""
- area_def1_flipped = self.area_def1.copy(area_extent=tuple([-1*ae for ae in self.area_def1.area_extent]))
- area_def2_flipped = self.area_def2.copy(area_extent=tuple([-1*ae for ae in self.area_def2.area_extent]))
- self.ds1.attrs['area'] = area_def1_flipped
- self.ds2.attrs['area'] = area_def2_flipped
- self.ds3.attrs['area'] = area_def2_flipped
- assert self.scene.coarsest_area() is area_def1_flipped
- assert self.scene.finest_area() is area_def2_flipped
- assert self.scene.coarsest_area(['2', '3']) is area_def2_flipped
+        ds1 = _create_coarsest_finest_data_array(coarse_area.shape, coarse_area, {"wavelength": (0.1, 0.2, 0.3)})
+        ds2 = _create_coarsest_finest_data_array(fine_area.shape, fine_area, {"wavelength": (0.4, 0.5, 0.6)})
+        ds3 = _create_coarsest_finest_data_array(fine_area.shape, fine_area, {"wavelength": (0.7, 0.8, 0.9)})
+ scn = Scene()
+ scn["1"] = ds1
+ scn["2"] = ds2
+ scn["3"] = ds3
+
+ assert scn.coarsest_area() is coarse_area
+ assert scn.finest_area() is fine_area
+ assert scn.coarsest_area(['2', '3']) is fine_area
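+        # "Coarsest" here means largest pixels: both areas span the same
+        # extent, so the one with fewer rows/columns has the coarser grid.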
+
+ @pytest.mark.parametrize(
+ ("area_def", "shifted_area"),
+ [
+ (_create_coarsest_finest_area_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0)),
+ _create_coarsest_finest_area_def((2, 5), (-900.0, -1400.0, 1100.0, 1600.0))),
+ (_create_coarsest_finest_swath_def((2, 5), (-1000.0, -1500.0, 1000.0, 1500.0), "1"),
+ _create_coarsest_finest_swath_def((2, 5), (-900.0, -1400.0, 1100.0, 1600.0), "2")),
+ ],
+ )
+ def test_coarsest_finest_area_same_shape(self, area_def, shifted_area):
+ """Test that two areas with the same shape are consistently returned.
+
+ If two geometries (ex. two AreaDefinitions or two SwathDefinitions)
+ have the same resolution (shape) but different
+ coordinates, which one has the finer resolution would ultimately be
+ determined by the semi-random ordering of the internal container of
+ the Scene (a dict) if only pixel resolution was compared. This test
+ makes sure that it is always the same object returned.
+
+ """
+        ds1 = _create_coarsest_finest_data_array(area_def.shape, area_def)
+        ds2 = _create_coarsest_finest_data_array(area_def.shape, shifted_area)
+ scn = Scene()
+ scn["ds1"] = ds1
+ scn["ds2"] = ds2
+        coarse_area1 = scn.coarsest_area()
+
+ scn = Scene()
+ scn["ds2"] = ds2
+ scn["ds1"] = ds1
+ coarse_area2 = scn.coarsest_area()
+ # doesn't matter what order they were added, this should be the same area
+        assert coarse_area2 is coarse_area1
class TestSceneAvailableDatasets:
@@ -704,7 +833,7 @@ def test_all_datasets_one_reader(self):
num_reader_ds = 21 + 6
assert len(id_list) == num_reader_ds
id_list = scene.all_dataset_ids(composites=True)
- assert len(id_list) == num_reader_ds + 29
+ assert len(id_list) == num_reader_ds + 33
def test_all_datasets_multiple_reader(self):
"""Test all datasets for multiple readers."""
@@ -733,6 +862,41 @@ def test_available_composite_ids_missing_available(self):
reader='fake1_1ds')
assert 'comp2' not in scene.available_composite_names()
+ def test_available_composites_known_versus_all(self):
+ """Test available_composite_ids when some datasets aren't available."""
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1',
+ reader_kwargs={"not_available": ["ds2", "ds3"]})
+ all_comps = scene.all_composite_names()
+ avail_comps = scene.available_composite_names()
+ # there should always be more known composites than available composites
+ assert len(all_comps) > len(avail_comps)
+ for not_avail_comp in ("comp2", "comp3"):
+ assert not_avail_comp in all_comps
+ assert not_avail_comp not in avail_comps
+
+
+class TestSceneSerialization:
+ """Test the Scene serialization."""
+
+ def setup_method(self):
+ """Set config_path to point to test 'etc' directory."""
+ self.old_config_path = satpy.config.get('config_path')
+ satpy.config.set(config_path=[TEST_ETC_DIR])
+
+ def teardown_method(self):
+ """Restore previous 'config_path' setting."""
+ satpy.config.set(config_path=self.old_config_path)
+
+ def test_serialization_with_readers_and_data_arr(self):
+ """Test that dask can serialize a Scene with readers."""
+ from distributed.protocol import deserialize, serialize
+
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene.load(['ds1'])
+ cloned_scene = deserialize(*serialize(scene))
+ assert scene._readers.keys() == cloned_scene._readers.keys()
+        assert scene.all_dataset_ids == cloned_scene.all_dataset_ids
+
class TestSceneLoading:
"""Test the Scene objects `.load` method."""
@@ -906,7 +1070,7 @@ def test_load_multiple_resolutions(self):
"""Test loading a dataset has multiple resolutions available with different resolutions."""
scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
comp25 = make_cid(name='comp25', resolution=1000)
- scene[comp25] = 'bla'
+ scene[comp25] = xr.DataArray([], attrs={'name': 'comp25', 'resolution': 1000})
scene.load(['comp25'], resolution=500)
loaded_ids = list(scene._datasets.keys())
@@ -1091,7 +1255,7 @@ def test_load_comp19(self):
# Check dependency tree nodes
# initialize the dep tree without loading the data
scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene._dependency_tree.populate_with_keys({'comp19'})
+ scene._update_dependency_tree({'comp19'}, None)
this_node = scene._dependency_tree['comp19']
shared_dep_id = make_dataid(name='ds5', modifiers=('res_change',))
@@ -1140,6 +1304,14 @@ def test_load_modified(self):
assert len(loaded_ids) == 1
assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2')
+ def test_load_modified_with_load_kwarg(self):
+ """Test loading a modified dataset using the ``Scene.load`` keyword argument."""
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene.load(['ds1'], modifiers=('mod1', 'mod2'))
+ loaded_ids = list(scene._datasets.keys())
+ assert len(loaded_ids) == 1
+ assert loaded_ids[0]['modifiers'] == ('mod1', 'mod2')
+
def test_load_multiple_modified(self):
"""Test loading multiple modified datasets."""
scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
@@ -1292,7 +1464,7 @@ def test_modified_with_wl_dep(self):
ds3_mod_id = make_dsq(name='ds3', modifiers=('mod_wl',))
scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
- scene._dependency_tree.populate_with_keys({ds1_mod_id, ds3_mod_id})
+ scene._update_dependency_tree({ds1_mod_id, ds3_mod_id}, None)
ds1_mod_node = scene._dependency_tree[ds1_mod_id]
ds3_mod_node = scene._dependency_tree[ds3_mod_id]
@@ -1349,6 +1521,31 @@ def test_available_comps_no_deps(self):
available_comp_ids = scene.available_composite_ids()
assert make_cid(name='static_image') in available_comp_ids
+ def test_compute_pass_through(self):
+ """Test pass through of xarray compute."""
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene.load(['ds1'])
+ scene = scene.compute()
+ assert isinstance(scene['ds1'].data, np.ndarray)
+
+ def test_persist_pass_through(self):
+ """Test pass through of xarray persist."""
+ from dask.array.utils import assert_eq
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene.load(['ds1'])
+ scenep = scene.persist()
+ assert_eq(scene['ds1'].data, scenep['ds1'].data)
+ assert set(scenep['ds1'].data.dask).issubset(scene['ds1'].data.dask)
+ assert len(scenep["ds1"].data.dask) == scenep['ds1'].data.npartitions
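+        # After persist, each chunk is backed by a concrete in-memory array,
+        # so the graph holds exactly one key per partition (checked above).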
+
+ def test_chunk_pass_through(self):
+ """Test pass through of xarray chunk."""
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene.load(['ds1'])
+ scene = scene.chunk(chunks=2)
+ assert scene['ds1'].data.chunksize == (2, 2)
+
class TestSceneResampling:
"""Test resampling a Scene to another Scene object."""
@@ -1463,8 +1660,6 @@ def test_resample_scene_preserves_requested_dependencies(self, rs):
def test_resample_reduce_data_toggle(self, rs):
"""Test that the Scene can be reduced or not reduced during resampling."""
from pyresample.geometry import AreaDefinition
- import dask.array as da
- import xarray as xr
rs.side_effect = self._fake_resample_dataset_force_20x20
proj_str = ('+proj=lcc +datum=WGS84 +ellps=WGS84 '
@@ -1604,6 +1799,74 @@ def test_no_generate_comp10(self, rs):
assert 'comp10' in new_scn
assert not new_scn.missing_datasets
+ def test_comp_loading_after_resampling_existing_sensor(self):
+ """Test requesting a composite after resampling."""
+ scene = Scene(filenames=['fake1_1.txt'], reader='fake1')
+ scene.load(["ds1", "ds2"])
+ new_scn = scene.resample(resampler='native')
+
+ # Can't load from readers after resampling
+ with pytest.raises(KeyError):
+ new_scn.load(["ds3"])
+
+ # But we can load composites because the sensor composites were loaded
+ # when the reader datasets were accessed
+ new_scn.load(["comp2"])
+ assert "comp2" in new_scn
+
+ def test_comp_loading_after_resampling_new_sensor(self):
+ """Test requesting a composite after resampling when the sensor composites weren't loaded before."""
+ # this is our base Scene with sensor "fake_sensor2"
+ scene1 = Scene(filenames=['fake2_3ds_1.txt'], reader='fake2_3ds')
+ scene1.load(["ds2"])
+ new_scn = scene1.resample(resampler='native')
+
+ # Can't load from readers after resampling
+ with pytest.raises(KeyError):
+ new_scn.load(["ds3"])
+
+ # Can't load the composite from fake_sensor composites yet
+ # 'ds1' is missing
+ with pytest.raises(KeyError):
+ new_scn.load(["comp2"])
+
+ # artificial DataArray "created by the user"
+ # mimics a user adding their own data with the same sensor
+ user_da = scene1["ds2"].copy()
+ user_da.attrs["name"] = "ds1"
+ user_da.attrs["sensor"] = {"fake_sensor2"}
+ # Add 'ds1' that doesn't provide the 'fake_sensor' sensor
+ new_scn["ds1"] = user_da
+ with pytest.raises(KeyError):
+ new_scn.load(["comp2"])
+ assert "comp2" not in new_scn
+
+ # artificial DataArray "created by the user"
+ # mimics a user adding their own data with its own sensor to the Scene
+ user_da = scene1["ds2"].copy()
+ user_da.attrs["name"] = "ds1"
+ user_da.attrs["sensor"] = {"fake_sensor"}
+ # Now 'fake_sensor' composites have been loaded
+ new_scn["ds1"] = user_da
+ new_scn.load(["comp2"])
+ assert "comp2" in new_scn
+
+ def test_comp_loading_multisensor_composite_created_user(self):
+ """Test that multisensor composite can be created manually.
+
+        Test that if the user has created datasets "manually",
+        provided multi-sensor composites can still be generated.
+ """
+ scene1 = Scene(filenames=["fake1_1.txt"], reader="fake1")
+ scene1.load(["ds1"])
+ scene2 = Scene(filenames=["fake4_1.txt"], reader="fake4")
+ scene2.load(["ds4_b"])
+ scene3 = Scene()
+ scene3["ds1"] = scene1["ds1"]
+ scene3["ds4_b"] = scene2["ds4_b"]
+ scene3.load(["comp_multi"])
+ assert "comp_multi" in scene3
+
def test_comps_need_resampling_optional_mod_deps(self):
"""Test that a composite with complex dependencies.
@@ -1655,10 +1918,6 @@ def tearDown(self):
def test_save_datasets_default(self):
"""Save a dataset using 'save_datasets'."""
- from satpy.scene import Scene
- import xarray as xr
- import dask.array as da
- from datetime import datetime
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
dims=('y', 'x'),
@@ -1672,10 +1931,6 @@ def test_save_datasets_default(self):
def test_save_datasets_by_ext(self):
"""Save a dataset using 'save_datasets' with 'filename'."""
- from satpy.scene import Scene
- import xarray as xr
- import dask.array as da
- from datetime import datetime
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
dims=('y', 'x'),
@@ -1694,10 +1949,6 @@ def test_save_datasets_by_ext(self):
def test_save_datasets_bad_writer(self):
"""Save a dataset using 'save_datasets' and a bad writer."""
- from satpy.scene import Scene
- import xarray as xr
- import dask.array as da
- from datetime import datetime
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
dims=('y', 'x'),
@@ -1713,7 +1964,6 @@ def test_save_datasets_bad_writer(self):
def test_save_datasets_missing_wishlist(self):
"""Calling 'save_datasets' with no valid datasets."""
- from satpy.scene import Scene
scn = Scene()
scn._wishlist.add(make_cid(name='true_color'))
pytest.raises(RuntimeError,
@@ -1726,10 +1976,6 @@ def test_save_datasets_missing_wishlist(self):
def test_save_dataset_default(self):
"""Save a dataset using 'save_dataset'."""
- from satpy.scene import Scene
- import xarray as xr
- import dask.array as da
- from datetime import datetime
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
dims=('y', 'x'),
@@ -1745,12 +1991,16 @@ def test_save_dataset_default(self):
class TestSceneConversions(unittest.TestCase):
"""Test Scene conversion to geoviews, xarray, etc."""
+ def test_to_xarray_dataset_with_empty_scene(self):
+ """Test converting empty Scene to xarray dataset."""
+ scn = Scene()
+ xrds = scn.to_xarray_dataset()
+ assert isinstance(xrds, xr.Dataset)
+ assert len(xrds.variables) == 0
+ assert len(xrds.coords) == 0
+
def test_geoviews_basic_with_area(self):
"""Test converting a Scene to geoviews with an AreaDefinition."""
- from satpy import Scene
- import xarray as xr
- import dask.array as da
- from datetime import datetime
from pyresample.geometry import AreaDefinition
scn = Scene()
area = AreaDefinition('test', 'test', 'test',
@@ -1765,10 +2015,6 @@ def test_geoviews_basic_with_area(self):
def test_geoviews_basic_with_swath(self):
"""Test converting a Scene to geoviews with a SwathDefinition."""
- from satpy import Scene
- import xarray as xr
- import dask.array as da
- from datetime import datetime
from pyresample.geometry import SwathDefinition
scn = Scene()
lons = xr.DataArray(da.zeros((2, 2)))
@@ -1798,8 +2044,6 @@ def test_aggregate(self):
@staticmethod
def _create_test_data(x_size, y_size):
- from satpy import Scene
- from xarray import DataArray
from pyresample.geometry import AreaDefinition
scene1 = Scene()
area_extent = (-5570248.477339745, -5561247.267842293, 5567248.074173927,
@@ -1815,14 +2059,18 @@ def _create_test_data(x_size, y_size):
y_size,
area_extent,
)
- scene1["1"] = DataArray(np.ones((y_size, x_size)), attrs={'_satpy_id_keys': default_id_keys_config})
- scene1["2"] = DataArray(np.ones((y_size, x_size)), dims=('y', 'x'),
- attrs={'_satpy_id_keys': default_id_keys_config})
- scene1["3"] = DataArray(np.ones((y_size, x_size)), dims=('y', 'x'),
- attrs={'area': area_def, '_satpy_id_keys': default_id_keys_config})
- scene1["4"] = DataArray(np.ones((y_size, x_size)), dims=('y', 'x'),
- attrs={'area': area_def, 'standard_name': 'backscatter',
- '_satpy_id_keys': default_id_keys_config})
+ scene1["1"] = xr.DataArray(np.ones((y_size, x_size)),
+ attrs={'_satpy_id_keys': default_id_keys_config})
+ scene1["2"] = xr.DataArray(np.ones((y_size, x_size)),
+ dims=('y', 'x'),
+ attrs={'_satpy_id_keys': default_id_keys_config})
+ scene1["3"] = xr.DataArray(np.ones((y_size, x_size)),
+ dims=('y', 'x'),
+ attrs={'area': area_def, '_satpy_id_keys': default_id_keys_config})
+ scene1["4"] = xr.DataArray(np.ones((y_size, x_size)),
+ dims=('y', 'x'),
+ attrs={'area': area_def, 'standard_name': 'backscatter',
+ '_satpy_id_keys': default_id_keys_config})
return scene1
def _check_aggregation_results(self, expected_aggregated_shape, scene1, scene2, x_size, y_size):
diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py
index 03b5740002..3f0e055765 100644
--- a/satpy/tests/test_utils.py
+++ b/satpy/tests/test_utils.py
@@ -16,16 +16,21 @@
# You should have received a copy of the GNU General Public License along with
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Testing of utils."""
+from __future__ import annotations
+import datetime
import logging
+import typing
import unittest
import warnings
from unittest import mock
+import dask.array as da
+import numpy as np
import pytest
-from numpy import sqrt
+import xarray as xr
-from satpy.utils import angle2xyz, lonlat2xyz, xyz2angle, xyz2lonlat, proj_units_to_meters, get_satpos
+from satpy.utils import angle2xyz, get_satpos, lonlat2xyz, proj_units_to_meters, xyz2angle, xyz2lonlat
class TestUtils(unittest.TestCase):
@@ -64,14 +69,14 @@ def test_lonlat2xyz(self):
self.assertAlmostEqual(z__, -1)
x__, y__, z__ = lonlat2xyz(0, 45)
- self.assertAlmostEqual(x__, sqrt(2) / 2)
+ self.assertAlmostEqual(x__, np.sqrt(2) / 2)
self.assertAlmostEqual(y__, 0)
- self.assertAlmostEqual(z__, sqrt(2) / 2)
+ self.assertAlmostEqual(z__, np.sqrt(2) / 2)
x__, y__, z__ = lonlat2xyz(0, 60)
- self.assertAlmostEqual(x__, sqrt(1) / 2)
+ self.assertAlmostEqual(x__, np.sqrt(1) / 2)
self.assertAlmostEqual(y__, 0)
- self.assertAlmostEqual(z__, sqrt(3) / 2)
+ self.assertAlmostEqual(z__, np.sqrt(3) / 2)
def test_angle2xyz(self):
"""Test the lonlat2xyz function."""
@@ -127,13 +132,13 @@ def test_angle2xyz(self):
x__, y__, z__ = angle2xyz(0, 45)
self.assertAlmostEqual(x__, 0)
- self.assertAlmostEqual(y__, sqrt(2) / 2)
- self.assertAlmostEqual(z__, sqrt(2) / 2)
+ self.assertAlmostEqual(y__, np.sqrt(2) / 2)
+ self.assertAlmostEqual(z__, np.sqrt(2) / 2)
x__, y__, z__ = angle2xyz(0, 60)
self.assertAlmostEqual(x__, 0)
- self.assertAlmostEqual(y__, sqrt(3) / 2)
- self.assertAlmostEqual(z__, sqrt(1) / 2)
+ self.assertAlmostEqual(y__, np.sqrt(3) / 2)
+ self.assertAlmostEqual(z__, np.sqrt(1) / 2)
def test_xyz2lonlat(self):
"""Test xyz2lonlat."""
@@ -153,7 +158,7 @@ def test_xyz2lonlat(self):
self.assertAlmostEqual(lon, 0)
self.assertAlmostEqual(lat, 90)
- lon, lat = xyz2lonlat(sqrt(2) / 2, sqrt(2) / 2, 0)
+ lon, lat = xyz2lonlat(np.sqrt(2) / 2, np.sqrt(2) / 2, 0)
self.assertAlmostEqual(lon, 45)
self.assertAlmostEqual(lat, 0)
@@ -175,7 +180,7 @@ def test_xyz2angle(self):
self.assertAlmostEqual(azi, 0)
self.assertAlmostEqual(zen, 0)
- azi, zen = xyz2angle(sqrt(2) / 2, sqrt(2) / 2, 0)
+ azi, zen = xyz2angle(np.sqrt(2) / 2, np.sqrt(2) / 2, 0)
self.assertAlmostEqual(azi, 45)
self.assertAlmostEqual(zen, 90)
@@ -205,54 +210,91 @@ def test_proj_units_to_meters(self):
res = proj_units_to_meters(prj)
self.assertEqual(res, '+a=6378137.000 +b=6378137.000 +h=35785863.000')
- @mock.patch('satpy.utils.warnings.warn')
- def test_get_satpos(self, warn_mock):
+
+class TestGetSatPos:
+ """Tests for 'get_satpos'."""
+
+ @pytest.mark.parametrize(
+ ("included_prefixes", "preference", "expected_result"),
+ [
+ (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), None, (1, 2, 3)),
+ (("satellite_actual_", "satellite_nominal_", "projection_"), None, (1.1, 2.1, 3)),
+ (("satellite_nominal_", "projection_"), None, (1.2, 2.2, 3.1)),
+ (("projection_",), None, (1.3, 2.3, 3.2)),
+ (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "nadir", (1, 2, 3)),
+ (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "actual", (1.1, 2.1, 3)),
+ (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "nominal", (1.2, 2.2, 3.1)),
+ (("nadir_", "satellite_actual_", "satellite_nominal_", "projection_"), "projection", (1.3, 2.3, 3.2)),
+ (("satellite_nominal_", "projection_"), "actual", (1.2, 2.2, 3.1)),
+ (("projection_",), "projection", (1.3, 2.3, 3.2)),
+ ]
+ )
+ def test_get_satpos(self, included_prefixes, preference, expected_result):
"""Test getting the satellite position."""
- orb_params = {'nadir_longitude': 1,
- 'satellite_actual_longitude': 1.1,
- 'satellite_nominal_longitude': 1.2,
- 'projection_longitude': 1.3,
- 'nadir_latitude': 2,
- 'satellite_actual_latitude': 2.1,
- 'satellite_nominal_latitude': 2.2,
- 'projection_latitude': 2.3,
- 'satellite_actual_altitude': 3,
- 'satellite_nominal_altitude': 3.1,
- 'projection_altitude': 3.2}
- dataset = mock.MagicMock(attrs={'orbital_parameters': orb_params,
- 'satellite_longitude': -1,
- 'satellite_latitude': -2,
- 'satellite_altitude': -3})
-
- # Nadir
- lon, lat, alt = get_satpos(dataset)
- self.assertTupleEqual((lon, lat, alt), (1, 2, 3))
-
- # Actual
- orb_params.pop('nadir_longitude')
- orb_params.pop('nadir_latitude')
- lon, lat, alt = get_satpos(dataset)
- self.assertTupleEqual((lon, lat, alt), (1.1, 2.1, 3))
-
- # Nominal
- orb_params.pop('satellite_actual_longitude')
- orb_params.pop('satellite_actual_latitude')
- orb_params.pop('satellite_actual_altitude')
- lon, lat, alt = get_satpos(dataset)
- self.assertTupleEqual((lon, lat, alt), (1.2, 2.2, 3.1))
-
- # Projection
- orb_params.pop('satellite_nominal_longitude')
- orb_params.pop('satellite_nominal_latitude')
- orb_params.pop('satellite_nominal_altitude')
- lon, lat, alt = get_satpos(dataset)
- self.assertTupleEqual((lon, lat, alt), (1.3, 2.3, 3.2))
- warn_mock.assert_called()
-
- # Legacy
- dataset.attrs.pop('orbital_parameters')
- lon, lat, alt = get_satpos(dataset)
- self.assertTupleEqual((lon, lat, alt), (-1, -2, -3))
+ all_orb_params = {
+ 'nadir_longitude': 1,
+ 'satellite_actual_longitude': 1.1,
+ 'satellite_nominal_longitude': 1.2,
+ 'projection_longitude': 1.3,
+ 'nadir_latitude': 2,
+ 'satellite_actual_latitude': 2.1,
+ 'satellite_nominal_latitude': 2.2,
+ 'projection_latitude': 2.3,
+ 'satellite_actual_altitude': 3,
+ 'satellite_nominal_altitude': 3.1,
+ 'projection_altitude': 3.2
+ }
+ orb_params = {key: value for key, value in all_orb_params.items() if
+ any(in_prefix in key for in_prefix in included_prefixes)}
+ data_arr = xr.DataArray((), attrs={'orbital_parameters': orb_params})
+
+ with warnings.catch_warnings(record=True) as caught_warnings:
+ lon, lat, alt = get_satpos(data_arr, preference=preference)
+ has_satpos_warnings = any("using projection" in str(msg.message) for msg in caught_warnings)
+ expect_warning = included_prefixes == ("projection_",) and preference != "projection"
+ if expect_warning:
+ assert has_satpos_warnings
+ else:
+ assert not has_satpos_warnings
+ assert (lon, lat, alt) == expected_result
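+        # The cases above encode the assumed preference fallback chain
+        # nadir -> actual -> nominal -> projection; 'preference' jumps to
+        # that point in the chain, then falls through to the next prefix
+        # whenever the preferred keys are absent.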
+
+ @pytest.mark.parametrize(
+ "attrs",
+ (
+ {},
+ {'orbital_parameters': {'projection_longitude': 1}},
+ {'satellite_altitude': 1}
+ )
+ )
+ def test_get_satpos_fails_with_informative_error(self, attrs):
+ """Test that get_satpos raises an informative error message."""
+ data_arr = xr.DataArray((), attrs=attrs)
+ with pytest.raises(KeyError, match="Unable to determine satellite position.*"):
+ get_satpos(data_arr)
+
+ def test_get_satpos_from_satname(self, caplog):
+ """Test getting satellite position from satellite name only."""
+ import pyorbital.tlefile
+
+ data_arr = xr.DataArray(
+ (),
+ attrs={
+ "platform_name": "Meteosat-42",
+ "sensor": "irives",
+ "start_time": datetime.datetime(2031, 11, 20, 19, 18, 17)})
+ with mock.patch("pyorbital.tlefile.read") as plr:
+ plr.return_value = pyorbital.tlefile.Tle(
+ "Meteosat-42",
+ line1="1 40732U 15034A 22011.84285506 .00000004 00000+0 00000+0 0 9995",
+ line2="2 40732 0.2533 325.0106 0000976 118.8734 330.4058 1.00272123 23817")
+ with caplog.at_level(logging.WARNING):
+ (lon, lat, alt) = get_satpos(data_arr, use_tle=True)
+ assert "Orbital parameters missing from metadata" in caplog.text
+ np.testing.assert_allclose(
+ (lon, lat, alt),
+ (119.39533705010592, -1.1491628298731498, 35803.19986408156),
+ rtol=1e-4,
+ )
def test_make_fake_scene():
@@ -262,9 +304,6 @@ def test_make_fake_scene():
purposes, it has grown sufficiently complex that it needs its own
testing.
"""
- import numpy as np
- import dask.array as da
- import xarray as xr
from satpy.tests.utils import make_fake_scene
assert make_fake_scene({}).keys() == []
@@ -316,7 +355,7 @@ def test_specific_check_satpy(self):
def test_debug_on(caplog):
"""Test that debug_on is working as expected."""
- from satpy.utils import debug_on, debug_off, debug
+ from satpy.utils import debug, debug_off, debug_on
def depwarn():
logger = logging.getLogger("satpy.silly")
@@ -345,7 +384,7 @@ def depwarn():
def test_logging_on_and_off(caplog):
"""Test that switching logging on and off works."""
- from satpy.utils import logging_on, logging_off
+ from satpy.utils import logging_off, logging_on
logger = logging.getLogger("satpy.silly")
logging_on()
with caplog.at_level(logging.WARNING):
@@ -357,3 +396,176 @@ def test_logging_on_and_off(caplog):
with caplog.at_level(logging.DEBUG):
logger.warning("You've got a nice army base here, Colonel.")
assert "You've got a nice army base here, Colonel." not in caplog.text
+
+
+@pytest.mark.parametrize(
+ ("shapes", "chunks", "dims", "exp_unified"),
+ [
+ (
+ ((3, 5, 5), (5, 5)),
+ (-1, -1),
+ (("bands", "y", "x"), ("y", "x")),
+ True,
+ ),
+ (
+ ((3, 5, 5), (5, 5)),
+ (-1, 2),
+ (("bands", "y", "x"), ("y", "x")),
+ True,
+ ),
+ (
+ ((4, 5, 5), (3, 5, 5)),
+ (-1, -1),
+ (("bands", "y", "x"), ("bands", "y", "x")),
+ False,
+ ),
+ ],
+)
+def test_unify_chunks(shapes, chunks, dims, exp_unified):
+ """Test unify_chunks utility function."""
+ from satpy.utils import unify_chunks
+ inputs = list(_data_arrays_from_params(shapes, chunks, dims))
+ results = unify_chunks(*inputs)
+ if exp_unified:
+ _verify_unified(results)
+ else:
+ _verify_unchanged_chunks(results, inputs)
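+    # The cases above check that chunk sizes get unified along dims shared
+    # by all arrays, and that arrays with incompatible shapes keep their
+    # original chunks.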
+
+
+def _data_arrays_from_params(shapes: list[tuple[int, ...]],
+                             chunks: list[int],
+                             dims: list[tuple[str, ...]]
+ ) -> typing.Generator[xr.DataArray, None, None]:
+ for shape, chunk, dim in zip(shapes, chunks, dims):
+ yield xr.DataArray(da.ones(shape, chunks=chunk), dims=dim)
+
+
+def _verify_unified(data_arrays: list[xr.DataArray]) -> None:
+ dim_chunks: dict[str, int] = {}
+ for data_arr in data_arrays:
+ for dim, chunk_size in zip(data_arr.dims, data_arr.chunks):
+ exp_chunks = dim_chunks.setdefault(dim, chunk_size)
+ assert exp_chunks == chunk_size
+
+
+def _verify_unchanged_chunks(data_arrays: list[xr.DataArray],
+ orig_arrays: list[xr.DataArray]) -> None:
+ for data_arr, orig_arr in zip(data_arrays, orig_arrays):
+ assert data_arr.chunks == orig_arr.chunks
+
+
+def test_chunk_pixel_size():
+ """Check the chunk pixel size computations."""
+ from unittest.mock import patch
+
+ from satpy.utils import get_chunk_pixel_size
+ with patch("satpy.utils.CHUNK_SIZE", None):
+ assert get_chunk_pixel_size() is None
+ with patch("satpy.utils.CHUNK_SIZE", 10):
+ assert get_chunk_pixel_size() == 100
+ with patch("satpy.utils.CHUNK_SIZE", (10, 20)):
+ assert get_chunk_pixel_size() == 200
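+    # Presumably a scalar CHUNK_SIZE is squared (10 * 10 = 100 pixels),
+    # while a (row, col) tuple is multiplied out (10 * 20 = 200 pixels).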
+
+
+def test_chunk_size_limit():
+ """Check the chunk size limit computations."""
+ from unittest.mock import patch
+
+ from satpy.utils import get_chunk_size_limit
+ with patch("satpy.utils.CHUNK_SIZE", None):
+ assert get_chunk_size_limit(np.uint8) is None
+ with patch("satpy.utils.CHUNK_SIZE", 10):
+ assert get_chunk_size_limit(np.float64) == 800
+ with patch("satpy.utils.CHUNK_SIZE", (10, 20)):
+ assert get_chunk_size_limit(np.int32) == 800
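+    # The byte limits follow from pixel count times itemsize: 10 * 10 pixels
+    # * 8 bytes (float64) = 800, and 10 * 20 pixels * 4 bytes (int32) = 800.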
+
+
+def test_convert_remote_files_to_fsspec_local_files():
+ """Test convertion of remote files to fsspec objects.
+
+ Case without scheme/protocol, which should default to plain filenames.
+ """
+ from satpy.utils import convert_remote_files_to_fsspec
+
+ filenames = ["/tmp/file1.nc", "file:///tmp/file2.nc"]
+ res = convert_remote_files_to_fsspec(filenames)
+ assert res == filenames
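+    # Local paths and file:// URIs are expected to pass through unchanged;
+    # only genuinely remote schemes (e.g. s3://) get wrapped in FSFile, as
+    # the following cases check.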
+
+
+def test_convert_remote_files_to_fsspec_mixed_sources():
+ """Test convertion of remote files to fsspec objects.
+
+ Case with mixed local and remote files.
+ """
+ from satpy.readers import FSFile
+ from satpy.utils import convert_remote_files_to_fsspec
+
+ filenames = ["/tmp/file1.nc", "s3://data-bucket/file2.nc", "file:///tmp/file3.nc"]
+ res = convert_remote_files_to_fsspec(filenames)
+ # Two local files, one remote
+ assert filenames[0] in res
+ assert filenames[2] in res
+ assert sum([isinstance(f, FSFile) for f in res]) == 1
+
+
+def test_convert_remote_files_to_fsspec_filename_dict():
+ """Test convertion of remote files to fsspec objects.
+
+ Case where filenames is a dictionary mapping readers and filenames.
+ """
+ from satpy.readers import FSFile
+ from satpy.utils import convert_remote_files_to_fsspec
+
+ filenames = {
+ "reader1": ["/tmp/file1.nc", "/tmp/file2.nc"],
+ "reader2": ["s3://tmp/file3.nc", "file:///tmp/file4.nc", "/tmp/file5.nc"]
+ }
+ res = convert_remote_files_to_fsspec(filenames)
+
+ assert res["reader1"] == filenames["reader1"]
+ assert filenames["reader2"][1] in res["reader2"]
+ assert filenames["reader2"][2] in res["reader2"]
+ assert sum([isinstance(f, FSFile) for f in res["reader2"]]) == 1
+
+
+def test_convert_remote_files_to_fsspec_fsfile():
+ """Test convertion of remote files to fsspec objects.
+
+ Case where the some of the files are already FSFile objects.
+ """
+ from satpy.readers import FSFile
+ from satpy.utils import convert_remote_files_to_fsspec
+
+ filenames = ["/tmp/file1.nc", "s3://data-bucket/file2.nc", FSFile("ssh:///tmp/file3.nc")]
+ res = convert_remote_files_to_fsspec(filenames)
+
+ assert sum([isinstance(f, FSFile) for f in res]) == 2
+
+
+def test_convert_remote_files_to_fsspec_windows_paths():
+ """Test convertion of remote files to fsspec objects.
+
+ Case where windows paths are used.
+ """
+ from satpy.utils import convert_remote_files_to_fsspec
+
+ filenames = [r"C:\wintendo\file1.nc", "e:\\wintendo\\file2.nc", r"wintendo\file3.nc"]
+ res = convert_remote_files_to_fsspec(filenames)
+
+ assert res == filenames
+
+
+@mock.patch('fsspec.open_files')
+def test_convert_remote_files_to_fsspec_storage_options(open_files):
+ """Test convertion of remote files to fsspec objects.
+
+ Case with storage options given.
+ """
+ from satpy.utils import convert_remote_files_to_fsspec
+
+ filenames = ["s3://tmp/file1.nc"]
+ storage_options = {'anon': True}
+
+ _ = convert_remote_files_to_fsspec(filenames, storage_options=storage_options)
+
+ open_files.assert_called_once_with(filenames, **storage_options)
diff --git a/satpy/tests/test_writers.py b/satpy/tests/test_writers.py
index 1000b78368..8d432287b5 100644
--- a/satpy/tests/test_writers.py
+++ b/satpy/tests/test_writers.py
@@ -16,16 +16,19 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Test generic writer functions."""
+from __future__ import annotations
+
import os
import shutil
import unittest
import warnings
+from unittest import mock
-import pytest
+import dask.array as da
import numpy as np
+import pytest
import xarray as xr
from trollimage.colormap import greys
-from unittest import mock
class TestWritersModule(unittest.TestCase):
@@ -42,6 +45,7 @@ def test_to_image_1d(self):
def test_to_image_2d(self, mock_geoimage):
"""Conversion to image."""
from satpy.writers import to_image
+
# 2D
data = np.arange(25).reshape((5, 5))
p = xr.DataArray(data, attrs=dict(mode="L", fill_value=0,
@@ -114,7 +118,7 @@ def test_init_nonexistent_enh_file(self):
class _BaseCustomEnhancementConfigTests:
- TEST_CONFIGS = {}
+ TEST_CONFIGS: dict[str, str] = {}
@classmethod
def setup_class(cls):
@@ -157,7 +161,6 @@ class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests):
TEST_CONFIGS = {
ENH_FN: """
-sensor_name: visir/test_sensor1
enhancements:
test1_sensor1_specific:
name: test1
@@ -169,7 +172,6 @@ class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests):
""",
ENH_FN2: """
-sensor_name: visir/test_sensor2
enhancements:
default:
operations:
@@ -195,8 +197,9 @@ class TestComplexSensorEnhancerConfigs(_BaseCustomEnhancementConfigTests):
def test_multisensor_choice(self):
"""Test that a DataArray with two sensors works."""
- from satpy.writers import Enhancer, get_enhanced_image
from xarray import DataArray
+
+ from satpy.writers import Enhancer, get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs={
'name': 'test1',
@@ -217,8 +220,9 @@ def test_multisensor_choice(self):
def test_multisensor_exact(self):
"""Test that a DataArray with two sensors can match exactly."""
- from satpy.writers import Enhancer, get_enhanced_image
from xarray import DataArray
+
+ from satpy.writers import Enhancer, get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs={
'name': 'my_comp',
@@ -237,6 +241,19 @@ def test_multisensor_exact(self):
# alphabetically first
np.testing.assert_allclose(img.data.values[0], ds.data / 20.0)
+ def test_enhance_bad_query_value(self):
+ """Test Enhancer doesn't fail when query includes bad values."""
+ from xarray import DataArray
+
+ from satpy.writers import Enhancer, get_enhanced_image
+ ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
+ attrs=dict(name=["I", "am", "invalid"], sensor='test_sensor2', mode='L'),
+ dims=['y', 'x'])
+ e = Enhancer()
+ assert e.enhancement_tree is not None
+ with pytest.raises(KeyError, match="No .* found for None"):
+ get_enhanced_image(ds, enhance=e)
+
class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests):
"""Test `Enhancer` functionality when user's custom configurations are present."""
@@ -249,7 +266,6 @@ class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests):
TEST_CONFIGS = {
ENH_FN: """
-sensor_name: visir/test_sensor
enhancements:
test1_default:
name: test1
@@ -260,7 +276,6 @@ class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests):
""",
ENH_ENH_FN: """
-sensor_name: visir/test_sensor
enhancements:
test1_kelvin:
name: test1
@@ -272,12 +287,10 @@ class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests):
""",
ENH_FN2: """
-sensor_name: visir/test_sensor2
""",
ENH_ENH_FN2: """
-sensor_name: visir/test_sensor2
""",
ENH_FN3: """""",
@@ -285,8 +298,9 @@ class TestEnhancerUserConfigs(_BaseCustomEnhancementConfigTests):
def test_enhance_empty_config(self):
"""Test Enhancer doesn't fail with empty enhancement file."""
- from satpy.writers import Enhancer, get_enhanced_image
from xarray import DataArray
+
+ from satpy.writers import Enhancer, get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs=dict(sensor='test_empty', mode='L'),
dims=['y', 'x'])
@@ -298,8 +312,9 @@ def test_enhance_empty_config(self):
def test_enhance_with_sensor_no_entry(self):
"""Test enhancing an image that has no configuration sections."""
- from satpy.writers import Enhancer, get_enhanced_image
from xarray import DataArray
+
+ from satpy.writers import Enhancer, get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs=dict(sensor='test_sensor2', mode='L'),
dims=['y', 'x'])
@@ -312,8 +327,9 @@ def test_enhance_with_sensor_no_entry(self):
def test_no_enhance(self):
"""Test turning off enhancements."""
- from satpy.writers import get_enhanced_image
from xarray import DataArray
+
+ from satpy.writers import get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs=dict(name='test1', sensor='test_sensor', mode='L'),
dims=['y', 'x'])
@@ -333,8 +349,9 @@ def test_writer_no_enhance(self):
def test_writer_custom_enhance(self):
"""Test using custom enhancements with writer."""
- from satpy.writers import Enhancer
from xarray import DataArray
+
+ from satpy.writers import Enhancer
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs=dict(name='test1', sensor='test_sensor', mode='L'),
dims=['y', 'x'])
@@ -346,9 +363,9 @@ def test_writer_custom_enhance(self):
def test_enhance_with_sensor_entry(self):
"""Test enhancing an image with a configuration section."""
- from satpy.writers import Enhancer, get_enhanced_image
from xarray import DataArray
- import dask.array as da
+
+ from satpy.writers import Enhancer, get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs=dict(name='test1', sensor='test_sensor', mode='L'),
dims=['y', 'x'])
@@ -374,8 +391,9 @@ def test_enhance_with_sensor_entry(self):
def test_enhance_with_sensor_entry2(self):
"""Test enhancing an image with a more detailed configuration section."""
- from satpy.writers import Enhancer, get_enhanced_image
from xarray import DataArray
+
+ from satpy.writers import Enhancer, get_enhanced_image
ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
attrs=dict(name='test1', units='kelvin',
sensor='test_sensor', mode='L'),
@@ -389,6 +407,100 @@ def test_enhance_with_sensor_entry2(self):
np.testing.assert_almost_equal(img.data.isel(bands=0).max().values, 0.5)
+class TestReaderEnhancerConfigs(_BaseCustomEnhancementConfigTests):
+ """Test enhancement configs that use reader name."""
+
+ ENH_FN = 'test_sensor1.yaml'
+
+    # NOTE: The sections are ordered in a special way so that if the 'reader'
+    # key isn't used we'll get a section we didn't want and all tests will
+    # fail. Otherwise the correct sections would be chosen simply by the
+    # order in which they were added to the decision tree.
+ TEST_CONFIGS = {
+ ENH_FN: """
+enhancements:
+ default_reader2:
+ reader: reader2
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: crude, min_stretch: 0, max_stretch: 75}
+ default:
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: crude, min_stretch: 0, max_stretch: 100}
+ test1_reader2_specific:
+ name: test1
+ reader: reader2
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: crude, min_stretch: 0, max_stretch: 50}
+ test1_reader1_specific:
+ name: test1
+ reader: reader1
+ operations:
+ - name: stretch
+ method: !!python/name:satpy.enhancements.stretch
+ kwargs: {stretch: crude, min_stretch: 0, max_stretch: 200}
+ """,
+ }
+
+ def _get_test_data_array(self):
+ from xarray import DataArray
+ ds = DataArray(np.arange(1, 11.).reshape((2, 5)),
+ attrs={
+ 'name': 'test1',
+ 'sensor': 'test_sensor1',
+ 'mode': 'L',
+ },
+ dims=['y', 'x'])
+ return ds
+
+ def _get_enhanced_image(self, data_arr):
+ from satpy.writers import Enhancer, get_enhanced_image
+ e = Enhancer()
+ assert e.enhancement_tree is not None
+ img = get_enhanced_image(data_arr, enhance=e)
+        # make sure the custom enhancement config was loaded
+ assert (set(e.sensor_enhancement_configs) ==
+ {os.path.abspath(self.ENH_FN)})
+ return img
+
+ def test_no_reader(self):
+ """Test that a DataArray with no 'reader' metadata works."""
+ data_arr = self._get_test_data_array()
+ img = self._get_enhanced_image(data_arr)
+        # no reader specified, so the default section without a reader is used
+ np.testing.assert_allclose(img.data.values[0], data_arr.data / 100.0)
+
+ def test_no_matching_reader(self):
+ """Test that a DataArray with no matching 'reader' works."""
+ data_arr = self._get_test_data_array()
+ data_arr.attrs["reader"] = "reader3"
+ img = self._get_enhanced_image(data_arr)
+        # no matching reader section, so the default section without a reader is used
+ np.testing.assert_allclose(img.data.values[0], data_arr.data / 100.0)
+
+ def test_only_reader_matches(self):
+ """Test that a DataArray with only a matching 'reader' works."""
+ data_arr = self._get_test_data_array()
+ data_arr.attrs["reader"] = "reader2"
+ data_arr.attrs["name"] = "not_configured"
+ img = self._get_enhanced_image(data_arr)
+        # only the reader matches (the name is not configured), so the reader-specific default is used
+ np.testing.assert_allclose(img.data.values[0], data_arr.data / 75.0)
+
+ def test_reader_and_name_match(self):
+ """Test that a DataArray with a matching 'reader' and 'name' works."""
+ data_arr = self._get_test_data_array()
+ data_arr.attrs["reader"] = "reader2"
+ img = self._get_enhanced_image(data_arr)
+        # both reader and name match, so the most specific section is used
+ np.testing.assert_allclose(img.data.values[0], data_arr.data / 50.0)
+
+
class TestYAMLFiles(unittest.TestCase):
"""Test and analyze the writer configuration files."""
@@ -437,7 +549,6 @@ def setUp(self):
from datetime import datetime
from satpy.scene import Scene
- import dask.array as da
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
@@ -560,7 +671,6 @@ def setup_method(self):
from datetime import datetime
from satpy.scene import Scene
- import dask.array as da
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
@@ -629,10 +739,8 @@ class TestOverlays(unittest.TestCase):
def setUp(self):
"""Create test data and mock pycoast/pydecorate."""
- from trollimage.xrimage import XRImage
from pyresample.geometry import AreaDefinition
- import xarray as xr
- import dask.array as da
+ from trollimage.xrimage import XRImage
proj_dict = {'proj': 'lcc', 'datum': 'WGS84', 'ellps': 'WGS84',
'lon_0': -95., 'lat_0': 25, 'lat_1': 25,
@@ -689,8 +797,9 @@ def tearDown(self):
def test_add_overlay_basic_rgb(self):
"""Test basic add_overlay usage with RGB data."""
- from satpy.writers import add_overlay, _burn_overlay
from pycoast import ContourWriterAGG
+
+ from satpy.writers import _burn_overlay, add_overlay
coast_dir = '/path/to/coast/data'
with mock.patch.object(self.orig_rgb_img, "apply_pil") as apply_pil:
apply_pil.return_value = self.orig_rgb_img
diff --git a/satpy/tests/test_yaml_reader.py b/satpy/tests/test_yaml_reader.py
index d889680954..89416ab76d 100644
--- a/satpy/tests/test_yaml_reader.py
+++ b/satpy/tests/test_yaml_reader.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-# Copyright (c) 2015-2019 Satpy developers
+# Copyright (c) 2015-2022 Satpy developers
#
# This file is part of satpy.
#
@@ -22,14 +22,61 @@
import unittest
from datetime import datetime
from tempfile import mkdtemp
-from unittest.mock import MagicMock, patch
+from unittest.mock import MagicMock, call, patch
+
+import numpy as np
+import xarray as xr
import satpy.readers.yaml_reader as yr
-from satpy.readers.file_handlers import BaseFileHandler
from satpy.dataset import DataQuery
+from satpy.dataset.dataid import ModifierTuple
+from satpy.readers.file_handlers import BaseFileHandler
+from satpy.readers.pmw_channels_definitions import FrequencyDoubleSideBand, FrequencyRange
from satpy.tests.utils import make_dataid
-import xarray as xr
-import numpy as np
+
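+# Fake reader configuration mimicking satpy/etc/readers/mhs_l1c_aapp.yaml,
+# used to exercise custom 'data_identification_keys' handling.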
+MHS_YAML_READER_DICT = {
+ 'reader': {'name': 'mhs_l1c_aapp',
+ 'description': 'AAPP l1c Reader for AMSU-B/MHS data',
+ 'sensors': ['mhs'],
+ 'default_channels': [1, 2, 3, 4, 5],
+ 'data_identification_keys': {'name': {'required': True},
+ 'frequency_double_sideband':
+ {'type': FrequencyDoubleSideBand},
+ 'frequency_range': {'type': FrequencyRange},
+ 'resolution': None,
+ 'polarization': {'enum': ['H', 'V']},
+ 'calibration': {'enum': ['brightness_temperature'], 'transitive': True},
+ 'modifiers': {'required': True,
+ 'default': [],
+ 'type': ModifierTuple}},
+ 'config_files': ('satpy/etc/readers/mhs_l1c_aapp.yaml',)},
+ 'datasets': {'1': {'name': '1',
+ 'frequency_range': {'central': 89.0, 'bandwidth': 2.8, 'unit': 'GHz'},
+ 'polarization': 'V',
+ 'resolution': 16000,
+ 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}},
+ 'coordinates': ['longitude', 'latitude'],
+ 'file_type': 'mhs_aapp_l1c'},
+ '2': {'name': '2',
+ 'frequency_range': {'central': 157.0, 'bandwidth': 2.8, 'unit': 'GHz'},
+ 'polarization': 'V',
+ 'resolution': 16000,
+ 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}},
+ 'coordinates': ['longitude', 'latitude'],
+ 'file_type': 'mhs_aapp_l1c'},
+ '3': {'name': '3',
+ 'frequency_double_sideband': {'unit': 'GHz',
+ 'central': 183.31,
+ 'side': 1.0,
+ 'bandwidth': 1.0},
+ 'polarization': 'V',
+ 'resolution': 16000,
+ 'calibration': {'brightness_temperature': {'standard_name': 'toa_brightness_temperature'}},
+ 'coordinates': ['longitude', 'latitude'],
+ 'file_type': 'mhs_aapp_l1c'}},
+ 'file_types': {'mhs_aapp_l1c': {'file_reader': BaseFileHandler,
+ 'file_patterns': [
+ 'mhsl1c_{platform_shortname}_{start_time:%Y%m%d_%H%M}_{orbit_number:05d}.l1c']}}} # noqa
class FakeFH(BaseFileHandler):
@@ -208,6 +255,41 @@ def test_create_filehandlers(self):
self.reader.create_filehandlers(filelist)
self.assertEqual(len(self.reader.file_handlers['ftype1']), 3)
+ def test_serializable(self):
+ """Check that a reader is serializable by dask.
+
+ This ensures users are able to serialize a Scene object that contains
+ readers.
+ """
+ from distributed.protocol import deserialize, serialize
+ filelist = ['a001.bla', 'a002.bla', 'a001.bla', 'a002.bla',
+ 'abcd.bla', 'k001.bla', 'a003.bli']
+
+ self.reader.create_filehandlers(filelist)
+ cloned_reader = deserialize(*serialize(self.reader))
+ assert self.reader.file_handlers.keys() == cloned_reader.file_handlers.keys()
+ assert self.reader.all_ids == cloned_reader.all_ids
+
+
+class TestFileYAMLReaderWithCustomIDKey(unittest.TestCase):
+ """Test units from FileYAMLReader with custom id_keys."""
+
+ def setUp(self):
+ """Set up the test case."""
+ self.config = MHS_YAML_READER_DICT
+ self.reader = yr.FileYAMLReader(MHS_YAML_READER_DICT,
+ filter_parameters={
+ 'start_time': datetime(2000, 1, 1),
+ 'end_time': datetime(2000, 1, 2),
+ })
+
+ def test_custom_type_with_dict_contents_gets_parsed_correctly(self):
+ """Test custom type with dictionary contents gets parsed correctly."""
+ ds_ids = list(self.reader.all_dataset_ids)
+ assert ds_ids[0]["frequency_range"] == FrequencyRange(89., 2.8, "GHz")
+
+ assert ds_ids[2]["frequency_double_sideband"] == FrequencyDoubleSideBand(183.31, 1., 1., "GHz")
+
class TestFileFileYAMLReader(unittest.TestCase):
"""Test units from FileYAMLReader."""
@@ -520,9 +602,8 @@ def setUp(self):
def _assign_array(dsid, *_args, **_kwargs):
if dsid['name'] == 'longitude':
return self.lons
- elif dsid['name'] == 'latitude':
+ if dsid['name'] == 'latitude':
return self.lats
-
return self.data
fake_fh.get_dataset.side_effect = _assign_array
@@ -551,6 +632,7 @@ def _check_area_for_ch01(self):
assert 'area' in res['ch01'].attrs
np.testing.assert_array_equal(res['ch01'].attrs['area'].lons, self.lons)
np.testing.assert_array_equal(res['ch01'].attrs['area'].lats, self.lats)
+ assert res['ch01'].attrs.get("reader") == "fake"
class TestFileFileYAMLReaderMultipleFileTypes(unittest.TestCase):
@@ -603,33 +685,11 @@ def test_update_ds_ids_from_file_handlers(self):
from functools import partial
orig_ids = self.reader.all_ids
- def available_datasets(self, configured_datasets=None):
- res = self.resolution
- # update previously configured datasets
- for is_avail, ds_info in (configured_datasets or []):
- if is_avail is not None:
- yield is_avail, ds_info
-
- matches = self.file_type_matches(ds_info['file_type'])
- if matches and ds_info.get('resolution') != res:
- new_info = ds_info.copy()
- new_info['resolution'] = res
- yield True, new_info
- elif is_avail is None:
- yield is_avail, ds_info
-
- def file_type_matches(self, ds_ftype):
- if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']:
- return True
- elif self.filetype_info['file_type'] in ds_ftype:
- return True
- return None
-
for ftype, resol in zip(('ftype1', 'ftype2'), (1, 2)):
# need to copy this because the dataset infos will be modified
_orig_ids = {key: val.copy() for key, val in orig_ids.items()}
with patch.dict(self.reader.all_ids, _orig_ids, clear=True), \
- patch.dict(self.reader.available_ids, {}, clear=True):
+ patch.dict(self.reader.available_ids, {}, clear=True):
# Add a file handler with resolution property
fh = MagicMock(filetype_info={'file_type': ftype},
resolution=resol)
@@ -652,6 +712,35 @@ def file_type_matches(self, ds_ftype):
self.assertEqual(resol, ds_id['resolution'])
+# Fake reader methods for testing multiple file types
+
+
+def available_datasets(self, configured_datasets=None):
+ """Fake available_datasets for testing multiple file types."""
+ res = self.resolution
+ # update previously configured datasets
+ for is_avail, ds_info in (configured_datasets or []):
+ if is_avail is not None:
+ yield is_avail, ds_info
+
+ matches = self.file_type_matches(ds_info['file_type'])
+ if matches and ds_info.get('resolution') != res:
+ new_info = ds_info.copy()
+ new_info['resolution'] = res
+ yield True, new_info
+ elif is_avail is None:
+ yield is_avail, ds_info
+
+
+def file_type_matches(self, ds_ftype):
+ """Fake file_type_matches for testing multiple file types."""
+ if isinstance(ds_ftype, str) and ds_ftype == self.filetype_info['file_type']:
+ return True
+ if self.filetype_info['file_type'] in ds_ftype:
+ return True
+ return None
+
+
class TestGEOFlippableFileYAMLReader(unittest.TestCase):
"""Test GEOFlippableFileYAMLReader."""
@@ -659,9 +748,8 @@ class TestGEOFlippableFileYAMLReader(unittest.TestCase):
@patch.object(yr.FileYAMLReader, "_load_dataset_with_area")
def test_load_dataset_with_area_for_single_areas(self, ldwa):
"""Test _load_dataset_with_area() for single area definitions."""
- import xarray as xr
- import numpy as np
from pyresample.geometry import AreaDefinition
+
from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader
reader = GEOFlippableFileYAMLReader()
@@ -767,9 +855,8 @@ def test_load_dataset_with_area_for_single_areas(self, ldwa):
@patch.object(yr.FileYAMLReader, "_load_dataset_with_area")
def test_load_dataset_with_area_for_stacked_areas(self, ldwa):
"""Test _load_dataset_with_area() for stacked area definitions."""
- import xarray as xr
- import numpy as np
from pyresample.geometry import AreaDefinition, StackedAreaDefinition
+
from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader
reader = GEOFlippableFileYAMLReader()
@@ -826,6 +913,88 @@ def test_load_dataset_with_area_for_stacked_areas(self, ldwa):
np.testing.assert_equal(res.coords['x'], np.arange(3))
np.testing.assert_equal(res.coords['time'], np.flip(np.arange(4)))
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
+ @patch.object(yr.FileYAMLReader, "_load_dataset_with_area")
+ def test_load_dataset_with_area_for_swath_def_data(self, ldwa):
+ """Test _load_dataset_with_area() for swath definition data."""
+ from pyresample.geometry import SwathDefinition
+
+ from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader
+
+ reader = GEOFlippableFileYAMLReader()
+
+ dsid = MagicMock()
+ coords = MagicMock()
+
+        # create a dummy upright DataArray
+ original_array = np.ones(3)
+ dim = np.arange(3)
+ lats = np.arange(3)
+ lons = np.arange(3)
+
+ swath_def = SwathDefinition(lons, lats)
+ dummy_ds_xr = xr.DataArray(original_array,
+ coords={'y': dim},
+ attrs={'area': swath_def},
+ dims=('y',))
+
+        # assign the dummy DataArray as the return value of the patched parent _load_dataset_with_area
+ ldwa.return_value = dummy_ds_xr
+
+ # returned dataset should be unchanged since datasets with a swath definition are not flippable
+ res = reader._load_dataset_with_area(dsid, coords, 'NE')
+ np.testing.assert_equal(res.values, original_array)
+
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
+ @patch.object(yr.FileYAMLReader, "_load_dataset_with_area")
+ def test_load_dataset_with_area_for_data_without_area(self, ldwa):
+ """Test _load_dataset_with_area() for data wihtout area information."""
+ from satpy.readers.yaml_reader import GEOFlippableFileYAMLReader
+
+ reader = GEOFlippableFileYAMLReader()
+
+ dsid = MagicMock()
+ coords = MagicMock()
+
+        # create a dummy upright DataArray
+ original_array = np.ones(3)
+ dim = np.arange(3)
+
+ dummy_ds_xr = xr.DataArray(original_array,
+ coords={'y': dim},
+ attrs={},
+ dims=('y',))
+
+        # assign the dummy DataArray as the return value of the patched parent _load_dataset_with_area
+ ldwa.return_value = dummy_ds_xr
+
+ # returned dataset should be unchanged since datasets without area information are not flippable
+ res = reader._load_dataset_with_area(dsid, coords, 'NE')
+ np.testing.assert_equal(res.values, original_array)
+
+
+def _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info):
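+    """Create a mocked file handler and its segment area definition for the padding tests."""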
+ seg_area = MagicMock()
+ seg_area.crs = 'some_crs'
+ seg_area.area_extent = aex
+ seg_area.shape = ashape
+ get_area_def = MagicMock()
+ get_area_def.return_value = seg_area
+
+ get_segment_position_info = MagicMock()
+ get_segment_position_info.return_value = chk_pos_info
+
+ fh = MagicMock()
+ filetype_info = {'expected_segments': expected_segments,
+ 'file_type': 'filetype1'}
+ filename_info = {'segment': segment}
+ fh.filetype_info = filetype_info
+ fh.filename_info = filename_info
+ fh.get_area_def = get_area_def
+ fh.get_segment_position_info = get_segment_position_info
+
+ return fh, seg_area
+
class TestGEOSegmentYAMLReader(unittest.TestCase):
"""Test GEOSegmentYAMLReader."""
@@ -875,11 +1044,10 @@ def test_get_expected_segments(self, cfh):
self.assertEqual(es, 5)
@patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
- @patch('satpy.readers.yaml_reader._get_empty_segment_with_height')
@patch('satpy.readers.yaml_reader.FileYAMLReader._load_dataset')
@patch('satpy.readers.yaml_reader.xr')
@patch('satpy.readers.yaml_reader._find_missing_segments')
- def test_load_dataset(self, mss, xr, parent_load_dataset, geswh):
+ def test_load_dataset(self, mss, xr, parent_load_dataset):
"""Test _load_dataset()."""
from satpy.readers.yaml_reader import GEOSegmentYAMLReader
reader = GEOSegmentYAMLReader()
@@ -962,55 +1130,17 @@ def test_load_dataset(self, mss, xr, parent_load_dataset, geswh):
self.assertTrue(slice_list[0] is empty_segment)
self.assertTrue(slice_list[1] is empty_segment)
- # Check that new FCI empty segment is generated if missing in the middle and at the end
- fake_fh = MagicMock()
- fake_fh.filename_info = {}
- fake_fh.filetype_info = {'file_type': 'fci_l1c_fdhsi'}
- empty_segment.shape = (140, 5568)
- slice_list[4] = None
- counter = 7
- mss.return_value = (counter, expected_segments, slice_list,
- failure, projectable)
- res = reader._load_dataset(dataid, ds_info, [fake_fh])
- assert 2 == geswh.call_count
-
# Disable padding
res = reader._load_dataset(dataid, ds_info, file_handlers,
pad_data=False)
parent_load_dataset.assert_called_once_with(dataid, ds_info,
file_handlers)
- def test_get_empty_segment_with_height(self):
- """Test _get_empty_segment_with_height()."""
- import xarray as xr
- import numpy as np
- from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh
-
- dim = 'y'
-
- # check expansion of empty segment
- empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x'])
- new_height = 140
- new_empty_segment = geswh(empty_segment, new_height, dim)
- assert new_empty_segment.shape == (140, 5568)
-
- # check reduction of empty segment
- empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x'])
- new_height = 139
- new_empty_segment = geswh(empty_segment, new_height, dim)
- assert new_empty_segment.shape == (139, 5568)
-
- # check that empty segment is not modified if it has the right height already
- empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x'])
- new_height = 140
- new_empty_segment = geswh(empty_segment, new_height, dim)
- assert new_empty_segment is empty_segment
-
@patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
@patch('satpy.readers.yaml_reader._load_area_def')
@patch('satpy.readers.yaml_reader._stack_area_defs')
- @patch('satpy.readers.yaml_reader._pad_earlier_segments_area')
- @patch('satpy.readers.yaml_reader._pad_later_segments_area')
+ @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_earlier_segments_area')
+ @patch('satpy.readers.yaml_reader.GEOSegmentYAMLReader._pad_later_segments_area')
def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def):
"""Test _load_area_def()."""
from satpy.readers.yaml_reader import GEOSegmentYAMLReader
@@ -1027,125 +1157,54 @@ def test_load_area_def(self, pesa, plsa, sad, parent_load_area_def):
reader._load_area_def(dataid, file_handlers, pad_data=False)
parent_load_area_def.assert_called_once_with(dataid, file_handlers)
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
@patch('satpy.readers.yaml_reader.AreaDefinition')
def test_pad_later_segments_area(self, AreaDefinition):
"""Test _pad_later_segments_area()."""
- from satpy.readers.yaml_reader import _pad_later_segments_area as plsa
-
- seg1_area = MagicMock()
- seg1_area.crs = 'some_crs'
- seg1_area.area_extent = [0, 1000, 200, 500]
- seg1_area.shape = [200, 500]
- get_area_def = MagicMock()
- get_area_def.return_value = seg1_area
- fh_1 = MagicMock()
- filetype_info = {'expected_segments': 2}
- filename_info = {'segment': 1}
- fh_1.filetype_info = filetype_info
- fh_1.filename_info = filename_info
- fh_1.get_area_def = get_area_def
+ from satpy.readers.yaml_reader import GEOSegmentYAMLReader
+ reader = GEOSegmentYAMLReader()
+
+ expected_segments = 2
+ segment = 1
+ aex = [0, 1000, 200, 500]
+ ashape = [200, 500]
+ fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None)
file_handlers = [fh_1]
dataid = 'dataid'
- res = plsa(file_handlers, dataid)
+ res = reader._pad_later_segments_area(file_handlers, dataid)
self.assertEqual(len(res), 2)
seg2_extent = (0, 1500, 200, 1000)
expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200,
seg2_extent)
AreaDefinition.assert_called_once_with(*expected_call)
- @patch('satpy.readers.yaml_reader.AreaDefinition')
- def test_pad_later_segments_area_for_FCI_padding(self, AreaDefinition):
- """Test _pad_later_segments_area() in the FCI padding case."""
- from satpy.readers.yaml_reader import _pad_later_segments_area as plsa
-
- seg1_area = MagicMock()
- seg1_area.crs = 'some_crs'
- seg1_area.area_extent = [0, 1000, 200, 500]
- seg1_area.shape = [556, 11136]
- get_area_def = MagicMock()
- get_area_def.return_value = seg1_area
- fh_1 = MagicMock()
- filetype_info = {'expected_segments': 2,
- 'file_type': 'fci_l1c_fdhsi'}
- filename_info = {'segment': 1}
- fh_1.filetype_info = filetype_info
- fh_1.filename_info = filename_info
- fh_1.get_area_def = get_area_def
- file_handlers = [fh_1]
- dataid = 'dataid'
- res = plsa(file_handlers, dataid)
- self.assertEqual(len(res), 2)
-
- # the previous chunk size is 556, which is exactly double the size of the FCI chunk 2 size (278)
- # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250.
- # The new area extent lower-left row is therefore 1000+250=1250
- seg2_extent = (0, 1250, 200, 1000)
- expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278,
- seg2_extent)
- AreaDefinition.assert_called_once_with(*expected_call)
-
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
@patch('satpy.readers.yaml_reader.AreaDefinition')
def test_pad_earlier_segments_area(self, AreaDefinition):
"""Test _pad_earlier_segments_area()."""
- from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa
-
- seg2_area = MagicMock()
- seg2_area.crs = 'some_crs'
- seg2_area.area_extent = [0, 1000, 200, 500]
- seg2_area.shape = [200, 500]
- get_area_def = MagicMock()
- get_area_def.return_value = seg2_area
- fh_2 = MagicMock()
- filetype_info = {'expected_segments': 2}
- filename_info = {'segment': 2}
- fh_2.filetype_info = filetype_info
- fh_2.filename_info = filename_info
- fh_2.get_area_def = get_area_def
+ from satpy.readers.yaml_reader import GEOSegmentYAMLReader
+ reader = GEOSegmentYAMLReader()
+
+ expected_segments = 2
+ segment = 2
+ aex = [0, 1000, 200, 500]
+ ashape = [200, 500]
+ fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, None)
+
file_handlers = [fh_2]
dataid = 'dataid'
area_defs = {2: seg2_area}
- res = pesa(file_handlers, dataid, area_defs)
+ res = reader._pad_earlier_segments_area(file_handlers, dataid, area_defs)
self.assertEqual(len(res), 2)
seg1_extent = (0, 500, 200, 0)
expected_call = ('fill', 'fill', 'fill', 'some_crs', 500, 200,
seg1_extent)
AreaDefinition.assert_called_once_with(*expected_call)
- @patch('satpy.readers.yaml_reader.AreaDefinition')
- def test_pad_earlier_segments_area_for_FCI_padding(self, AreaDefinition):
- """Test _pad_earlier_segments_area() for the FCI case."""
- from satpy.readers.yaml_reader import _pad_earlier_segments_area as pesa
-
- seg2_area = MagicMock()
- seg2_area.crs = 'some_crs'
- seg2_area.area_extent = [0, 1000, 200, 500]
- seg2_area.shape = [278, 5568]
- get_area_def = MagicMock()
- get_area_def.return_value = seg2_area
- fh_2 = MagicMock()
- filetype_info = {'expected_segments': 2,
- 'file_type': 'fci_l1c_fdhsi'}
- filename_info = {'segment': 2}
- fh_2.filetype_info = filetype_info
- fh_2.filename_info = filename_info
- fh_2.get_area_def = get_area_def
- file_handlers = [fh_2]
- dataid = 'dataid'
- area_defs = {2: seg2_area}
- res = pesa(file_handlers, dataid, area_defs)
- self.assertEqual(len(res), 2)
-
- # the previous chunk size is 278, which is exactly double the size of the FCI chunk 1 size (139)
- # therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250.
- # The new area extent lower-left row is therefore 500-250=250
- seg1_extent = (0, 500, 200, 250)
- expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139,
- seg1_extent)
- AreaDefinition.assert_called_once_with(*expected_call)
-
def test_find_missing_segments(self):
"""Test _find_missing_segments()."""
from satpy.readers.yaml_reader import _find_missing_segments as fms
+
# Dataset with only one segment
filename_info = {'segment': 1}
fh_seg1 = MagicMock(filename_info=filename_info)
@@ -1184,3 +1243,258 @@ def test_find_missing_segments(self):
self.assertEqual(slice_list, [None, projectable, None])
self.assertFalse(failure)
self.assertTrue(proj is projectable)
+
+
+class TestGEOVariableSegmentYAMLReader(unittest.TestCase):
+ """Test GEOVariableSegmentYAMLReader."""
+
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
+ @patch('satpy.readers.yaml_reader._get_empty_segment_with_height')
+ @patch('satpy.readers.yaml_reader.xr')
+ @patch('satpy.readers.yaml_reader._find_missing_segments')
+ def test_get_empty_segment(self, mss, xr, geswh):
+ """Test execution of (overridden) get_empty_segment inside _load_dataset."""
+ from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader
+ reader = GEOVariableSegmentYAMLReader()
+        # Set up the input and output of the mocked functions for a missing first segment
+ chk_pos_info = {
+ '1km': {'start_position_row': 0,
+ 'end_position_row': 0,
+ 'segment_height': 0,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': 140,
+ 'end_position_row': None,
+ 'segment_height': 278,
+ 'segment_width': 5568}
+ }
+ expected_segments = 2
+ segment = 2
+ aex = [0, 1000, 200, 500]
+ ashape = [278, 5568]
+ fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info)
+
+ file_handlers = {'filetype1': [fh_2]}
+ reader._extract_segment_location_dicts(file_handlers)
+
+ counter = 2
+ seg = MagicMock(dims=['y', 'x'])
+ slice_list = [None, seg]
+ failure = False
+ projectable = MagicMock()
+ empty_segment = MagicMock()
+ empty_segment.shape = [278, 5568]
+ xr.full_like.return_value = empty_segment
+ dataid = MagicMock()
+ ds_info = MagicMock()
+
+ mss.return_value = (counter, expected_segments, slice_list,
+ failure, projectable)
+ reader._load_dataset(dataid, ds_info, [fh_2])
+        # the overridden get_empty_segment should delegate to _get_empty_segment_with_height
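+        # with a height of 139: the second segment starts at row 140, so the missing first segment is 139 rows high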
+ geswh.assert_called_once_with(empty_segment, 139, dim='y')
+
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
+ @patch('satpy.readers.yaml_reader.AreaDefinition')
+ def test_pad_earlier_segments_area(self, AreaDefinition):
+ """Test _pad_earlier_segments_area() for the variable segment case."""
+ from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader
+ reader = GEOVariableSegmentYAMLReader()
+        # values that shouldn't be relevant for this test are set to 0 or None
+ chk_pos_info = {
+ '1km': {'start_position_row': 0,
+ 'end_position_row': 0,
+ 'segment_height': 0,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': 140,
+ 'end_position_row': None,
+ 'segment_height': 278,
+ 'segment_width': 5568}
+ }
+ expected_segments = 2
+ segment = 2
+ aex = [0, 1000, 200, 500]
+ ashape = [278, 5568]
+ fh_2, seg2_area = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info)
+
+ file_handlers = {'filetype1': [fh_2]}
+ reader._extract_segment_location_dicts(file_handlers)
+ dataid = 'dataid'
+ area_defs = {2: seg2_area}
+ res = reader._pad_earlier_segments_area([fh_2], dataid, area_defs)
+ self.assertEqual(len(res), 2)
+
+ # The later vertical chunk (nr. 2) size is 278, which is exactly double the size
+ # of the gap left by the missing first chunk (139, as the second chunk starts at line 140).
+ # Therefore, the new vertical area extent for the first chunk should be
+ # half of the previous size (1000-500)/2=250.
+ # The new area extent lower-left row is therefore 500-250=250
+ seg1_extent = (0, 500, 200, 250)
+ expected_call = ('fill', 'fill', 'fill', 'some_crs', 5568, 139,
+ seg1_extent)
+ AreaDefinition.assert_called_once_with(*expected_call)
+
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
+ @patch('satpy.readers.yaml_reader.AreaDefinition')
+ def test_pad_later_segments_area(self, AreaDefinition):
+ """Test _pad_later_segments_area() in the variable padding case."""
+ from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader
+ reader = GEOVariableSegmentYAMLReader()
+
+ chk_pos_info = {
+ '1km': {'start_position_row': None,
+ 'end_position_row': 11136 - 278,
+ 'segment_height': 556,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': 0,
+ 'end_position_row': 0,
+ 'segment_height': 0,
+ 'segment_width': 5568}}
+
+ expected_segments = 2
+ segment = 1
+ aex = [0, 1000, 200, 500]
+ ashape = [556, 11136]
+ fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info)
+ file_handlers = {'filetype1': [fh_1]}
+ reader._extract_segment_location_dicts(file_handlers)
+ dataid = 'dataid'
+ res = reader._pad_later_segments_area([fh_1], dataid)
+ self.assertEqual(len(res), 2)
+
+        # The previous chunk size is 556, which is exactly double the size of the gap left
+        # by the missing last chunk (278, as the second-to-last chunk ends at line 11136 - 278).
+        # Therefore, the new vertical area extent should be half of the previous size (1000-500)/2=250.
+        # The new area extent lower-left row is therefore 1000+250=1250
+ seg2_extent = (0, 1250, 200, 1000)
+ expected_call = ('fill', 'fill', 'fill', 'some_crs', 11136, 278,
+ seg2_extent)
+ AreaDefinition.assert_called_once_with(*expected_call)
+
+ @patch.object(yr.FileYAMLReader, "__init__", lambda x: None)
+ @patch('satpy.readers.yaml_reader.AreaDefinition')
+ def test_pad_later_segments_area_for_multiple_chunks_gap(self, AreaDefinition):
+ """Test _pad_later_segments_area() in the variable padding case for multiple gaps with multiple chunks."""
+ from satpy.readers.yaml_reader import GEOVariableSegmentYAMLReader
+ reader = GEOVariableSegmentYAMLReader()
+
+ def side_effect_areadef(a, b, c, crs, width, height, aex):
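+            """Return a mocked area carrying crs, shape and extent so each padded segment chains off the previous one."""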
+ m = MagicMock()
+ m.shape = [height, width]
+ m.area_extent = aex
+ m.crs = crs
+ return m
+
+ AreaDefinition.side_effect = side_effect_areadef
+
+ chk_pos_info = {
+ '1km': {'start_position_row': 11136 - 600 - 100 + 1,
+ 'end_position_row': 11136 - 600,
+ 'segment_height': 100,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': 0,
+ 'end_position_row': 0,
+ 'segment_height': 0,
+ 'segment_width': 5568}}
+ expected_segments = 8
+ segment = 1
+ aex = [0, 1000, 200, 500]
+ ashape = [100, 11136]
+ fh_1, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info)
+ chk_pos_info = {
+ '1km': {'start_position_row': 11136 - 300 - 100 + 1,
+ 'end_position_row': 11136 - 300,
+ 'segment_height': 100,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': 0,
+ 'end_position_row': 0,
+ 'segment_height': 0,
+ 'segment_width': 5568}}
+ segment = 4
+ fh_4, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info)
+
+ chk_pos_info = {
+ '1km': {'start_position_row': 11136 - 100 + 1,
+ 'end_position_row': None,
+ 'segment_height': 100,
+ 'segment_width': 11136},
+ '2km': {'start_position_row': 0,
+ 'end_position_row': 0,
+ 'segment_height': 0,
+ 'segment_width': 5568}}
+ segment = 8
+ fh_8, _ = _create_mocked_fh_and_areadef(aex, ashape, expected_segments, segment, chk_pos_info)
+
+ file_handlers = {'filetype1': [fh_1, fh_4, fh_8]}
+
+ reader._extract_segment_location_dicts(file_handlers)
+ dataid = 'dataid'
+ res = reader._pad_later_segments_area([fh_1, fh_4, fh_8], dataid)
+ self.assertEqual(len(res), 8)
+
+ # Regarding the chunk sizes:
+ # First group of missing chunks:
+        # The end position row of the gap is the start row of the last available chunk - 1: 11136-300-100+1-1=10736
+        # The start position row of the gap is the end row of the first available chunk + 1: 11136-600+1=10537
+        # hence the gap is 10736-10537+1=200 px high
+        # The 200px have to be split between two missing chunks; the most equal way to do it is with
+        # sizes of 100: 100+100=200
+        # Second group:
+        # The end position row of the gap is the start row of the last chunk - 1: 11136-100+1-1=11036
+        # The start position row of the gap is the end row of the first chunk + 1: 11136-300+1=10837
+        # hence the gap is 11036-10837+1=200 px high
+        # The 200px have to be split between three missing chunks; the most equal way to do it is with
+        # sizes of 66 and 67: 66+67+67=200
+
+ # Regarding the heights:
+ # First group:
+ # The first chunk has 100px height and 500 area extent height.
+ # The first padded chunk has 100px height -> 500*100/100=500 area extent height ->1000+500=1500
+ # The second padded chunk has 100px height -> 500*100/100=500 area extent height ->1500+500=2000
+ # Second group:
+ # The first chunk has 100px height and 500 area extent height.
+ # The first padded chunk has 66px height -> 500*66/100=330 area extent height ->1000+330=1330
+ # The second padded chunk has 67px height -> 500*67/100=335 area extent height ->1330+335=1665
+        # The third padded chunk has 67px height -> 500*67/100=335 area extent height ->1665+335=2000
+ self.assertEqual(AreaDefinition.call_count, 5)
+ expected_call1 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100,
+ (0, 1500.0, 200, 1000))
+ expected_call2 = ('fill', 'fill', 'fill', 'some_crs', 11136, 100,
+ (0, 2000.0, 200, 1500))
+ expected_call3 = ('fill', 'fill', 'fill', 'some_crs', 11136, 66,
+ (0, 1330.0, 200, 1000))
+ expected_call4 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67,
+ (0, 1665.0, 200, 1330.0))
+ expected_call5 = ('fill', 'fill', 'fill', 'some_crs', 11136, 67,
+ (0, 2000.0, 200, 1665.0))
+
+ AreaDefinition.side_effect = None
+ AreaDefinition.assert_has_calls([call(*expected_call1),
+ call(*expected_call2),
+ call(*expected_call3),
+ call(*expected_call4),
+ call(*expected_call5)
+ ])
+
+ def test_get_empty_segment_with_height(self):
+ """Test _get_empty_segment_with_height()."""
+ from satpy.readers.yaml_reader import _get_empty_segment_with_height as geswh
+
+ dim = 'y'
+
+ # check expansion of empty segment
+ empty_segment = xr.DataArray(np.ones((139, 5568)), dims=['y', 'x'])
+ new_height = 140
+ new_empty_segment = geswh(empty_segment, new_height, dim)
+ assert new_empty_segment.shape == (140, 5568)
+
+ # check reduction of empty segment
+ empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x'])
+ new_height = 139
+ new_empty_segment = geswh(empty_segment, new_height, dim)
+ assert new_empty_segment.shape == (139, 5568)
+
+ # check that empty segment is not modified if it has the right height already
+ empty_segment = xr.DataArray(np.ones((140, 5568)), dims=['y', 'x'])
+ new_height = 140
+ new_empty_segment = geswh(empty_segment, new_height, dim)
+ assert new_empty_segment is empty_segment
diff --git a/satpy/tests/utils.py b/satpy/tests/utils.py
index 74650e4abd..bdfc7455b7 100644
--- a/satpy/tests/utils.py
+++ b/satpy/tests/utils.py
@@ -16,21 +16,22 @@
# along with this program. If not, see .
"""Utilities for various satpy tests."""
-from unittest import mock
+from contextlib import contextmanager
from datetime import datetime
+from unittest import mock
+
+import dask.array as da
+import numpy as np
+from pyresample import create_area_def
+from pyresample.geometry import BaseDefinition, SwathDefinition
+from xarray import DataArray
from satpy import Scene
+from satpy.composites import GenericCompositor, IncompatibleAreas
from satpy.dataset import DataID, DataQuery
from satpy.dataset.dataid import default_id_keys_config, minimal_default_keys_config
-from satpy.readers.file_handlers import BaseFileHandler
-from satpy.composites import GenericCompositor, IncompatibleAreas
from satpy.modifiers import ModifierBase
-
-from pyresample.geometry import SwathDefinition, BaseDefinition
-from pyresample import create_area_def
-from xarray import DataArray
-import dask.array as da
-import numpy as np
+from satpy.readers.file_handlers import BaseFileHandler
FAKE_FILEHANDLER_START = datetime(2020, 1, 1, 0, 0, 0)
FAKE_FILEHANDLER_END = datetime(2020, 1, 1, 1, 0, 0)
@@ -246,6 +247,25 @@ def get_dataset(self, data_id: DataID, ds_info: dict):
attrs=attrs,
dims=['y', 'x'])
+ def available_datasets(self, configured_datasets=None):
+ """Report YAML datasets available unless 'not_available' is specified during creation."""
+ not_available_names = self.kwargs.get("not_available", [])
+ for is_avail, ds_info in (configured_datasets or []):
+ if is_avail is not None:
+ # some other file handler said it has this dataset
+ # we don't know any more information than the previous
+ # file handler so let's yield early
+ yield is_avail, ds_info
+ continue
+ ft_matches = self.file_type_matches(ds_info['file_type'])
+ if not ft_matches:
+ yield None, ds_info
+ continue
+ # mimic what happens when a reader "knows" about one variable
+ # but the files loaded don't have that variable
+ is_avail = ds_info["name"] not in not_available_names
+ yield is_avail, ds_info
+
class CustomScheduler(object):
"""Scheduler raising an exception if data are computed too many times."""
@@ -265,6 +285,14 @@ def __call__(self, dsk, keys, **kwargs):
return dask.get(dsk, keys, **kwargs)
+@contextmanager
+def assert_maximum_dask_computes(max_computes=1):
+ """Context manager to make sure dask computations are not executed more than ``max_computes`` times."""
+ import dask
+ with dask.config.set(scheduler=CustomScheduler(max_computes=max_computes)) as new_config:
+ yield new_config
+
+
def make_fake_scene(content_dict, daskify=False, area=True,
common_attrs=None):
"""Create a fake Scene.
@@ -297,9 +325,9 @@ def make_fake_scene(content_dict, daskify=False, area=True,
area (bool or BaseDefinition): Can be ``True``, ``False``, or an
instance of ``pyresample.geometry.BaseDefinition`` such as
``AreaDefinition`` or ``SwathDefinition``. If ``True``, which is
- the default, automatically generate areas. If ``False``, values
- will not have assigned areas. If an instance of
- ``pyresample.geometry.BaseDefinition``, those instances will be
+ the default, automatically generate areas with the name "test-area".
+ If ``False``, values will not have assigned areas. If an instance
+ of ``pyresample.geometry.BaseDefinition``, those instances will be
used for all generated fake datasets. Warning: Passing an area as
a string (``area="germ"``) is not supported.
common_attrs (Mapping): optional, additional attributes that will
@@ -313,31 +341,45 @@ def make_fake_scene(content_dict, daskify=False, area=True,
sc = Scene()
for (did, arr) in content_dict.items():
extra_attrs = common_attrs.copy()
- if isinstance(area, BaseDefinition):
- extra_attrs["area"] = area
- elif area:
- extra_attrs["area"] = create_area_def(
- "test-area",
- {"proj": "eqc", "lat_ts": 0, "lat_0": 0, "lon_0": 0,
- "x_0": 0, "y_0": 0, "ellps": "sphere", "units": "m",
- "no_defs": None, "type": "crs"},
- units="m",
- shape=arr.shape,
- resolution=1000,
- center=(0, 0))
- if isinstance(arr, DataArray):
- sc[did] = arr.copy() # don't change attributes of input
- sc[did].attrs.update(extra_attrs)
- else:
- if daskify:
- arr = da.from_array(arr)
- sc[did] = DataArray(
- arr,
- dims=("y", "x"),
- attrs=extra_attrs)
+ if area:
+ extra_attrs["area"] = _get_fake_scene_area(arr, area)
+ sc[did] = _get_did_for_fake_scene(area, arr, extra_attrs, daskify)
return sc
+def _get_fake_scene_area(arr, area):
+ """Get area for fake scene. Helper for make_fake_scene."""
+ if isinstance(area, BaseDefinition):
+ return area
+ return create_area_def(
+ "test-area",
+ {"proj": "eqc", "lat_ts": 0, "lat_0": 0, "lon_0": 0,
+ "x_0": 0, "y_0": 0, "ellps": "sphere", "units": "m",
+ "no_defs": None, "type": "crs"},
+ units="m",
+ shape=arr.shape,
+ resolution=1000,
+ center=(0, 0))
+
+
+def _get_did_for_fake_scene(area, arr, extra_attrs, daskify):
+ """Add instance to fake scene. Helper for make_fake_scene."""
+ from satpy.resample import add_crs_xy_coords
+ if isinstance(arr, DataArray):
+ new = arr.copy() # don't change attributes of input
+ new.attrs.update(extra_attrs)
+ else:
+ if daskify:
+ arr = da.from_array(arr)
+ new = DataArray(
+ arr,
+ dims=("y", "x"),
+ attrs=extra_attrs)
+ if area:
+ new = add_crs_xy_coords(new, extra_attrs["area"])
+ return new
+
+
def assert_attrs_equal(attrs, attrs_exp, tolerance=0):
"""Test that attributes are equal.
diff --git a/satpy/tests/writer_tests/test_awips_tiled.py b/satpy/tests/writer_tests/test_awips_tiled.py
index aa31458d9b..3509f6d6f8 100644
--- a/satpy/tests/writer_tests/test_awips_tiled.py
+++ b/satpy/tests/writer_tests/test_awips_tiled.py
@@ -16,13 +16,24 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see .
"""Tests for the AWIPS Tiled writer."""
+
+import logging
import os
-from glob import glob
+import shutil
from datetime import datetime, timedelta
+from glob import glob
-import numpy as np
+import dask
import dask.array as da
+import numpy as np
import pytest
+import xarray as xr
+from pyproj import CRS
+
+from satpy.resample import update_resampled_coords
+
+START_TIME = datetime(2018, 1, 1, 12, 0, 0)
+END_TIME = START_TIME + timedelta(minutes=20)
def _check_production_location(ds):
@@ -37,8 +48,37 @@ def _check_production_location(ds):
assert len(ds.attrs[prod_loc_name]) == 31
-def check_required_common_attributes(ds):
+def check_required_properties(unmasked_ds, masked_ds):
+ """Check various aspects of coordinates and attributes for correctness."""
+ _check_scaled_x_coordinate_variable(unmasked_ds, masked_ds)
+ _check_scaled_y_coordinate_variable(unmasked_ds, masked_ds)
+ _check_required_common_attributes(unmasked_ds)
+
+
+def _check_required_common_attributes(ds):
"""Check common properties of the created AWIPS tiles for validity."""
+ for attr_name in ('tile_row_offset', 'tile_column_offset',
+ 'product_tile_height', 'product_tile_width',
+ 'number_product_tiles',
+ 'product_rows', 'product_columns'):
+ assert attr_name in ds.attrs
+ _check_production_location(ds)
+
+ for data_arr in ds.data_vars.values():
+ if data_arr.ndim == 0:
+ # grid mapping variable
+ assert 'grid_mapping_name' in data_arr.attrs
+ continue
+ assert data_arr.encoding.get('zlib', False)
+ assert 'grid_mapping' in data_arr.attrs
+ assert data_arr.attrs['grid_mapping'] in ds
+ assert 'units' in data_arr.attrs
+ if data_arr.name != "DQF":
+ assert data_arr.dtype == np.int16
+ assert data_arr.attrs["_Unsigned"] == "true"
+
+
+def _check_scaled_x_coordinate_variable(ds, masked_ds):
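+    """Check the scaled 'x' coordinate attributes and that the unscaled values are increasing."""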
assert 'x' in ds.coords
x_coord = ds.coords['x']
np.testing.assert_equal(np.diff(x_coord), 1)
@@ -46,8 +86,14 @@ def check_required_common_attributes(ds):
assert x_attrs.get('standard_name') == 'projection_x_coordinate'
assert x_attrs.get('units') == 'meters'
assert 'scale_factor' in x_attrs
+ assert x_attrs['scale_factor'] > 0
assert 'add_offset' in x_attrs
+ unscaled_x = masked_ds.coords['x'].values
+ assert (np.diff(unscaled_x) > 0).all()
+
+
+def _check_scaled_y_coordinate_variable(ds, masked_ds):
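+    """Check the scaled 'y' coordinate attributes and that the unscaled values are decreasing."""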
assert 'y' in ds.coords
y_coord = ds.coords['y']
np.testing.assert_equal(np.diff(y_coord), 1)
@@ -55,99 +101,111 @@ def check_required_common_attributes(ds):
assert y_attrs.get('standard_name') == 'projection_y_coordinate'
assert y_attrs.get('units') == 'meters'
assert 'scale_factor' in y_attrs
+ assert y_attrs['scale_factor'] < 0
assert 'add_offset' in y_attrs
- for attr_name in ('tile_row_offset', 'tile_column_offset',
- 'product_tile_height', 'product_tile_width',
- 'number_product_tiles',
- 'product_rows', 'product_columns'):
- assert attr_name in ds.attrs
- _check_production_location(ds)
+ unscaled_y = masked_ds.coords['y'].values
+ assert (np.diff(unscaled_y) < 0).all()
+
+
+def _get_test_area(shape=(200, 100), crs=None, extents=None):
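+    """Get an AreaDefinition for testing, defaulting to a small LCC area."""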
+ from pyresample.geometry import AreaDefinition
+ if crs is None:
+ crs = CRS('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=25 +lat_1=25 +units=m +no_defs')
+ if extents is None:
+ extents = (-1000., -1500., 1000., 1500.)
+ area_def = AreaDefinition(
+ 'test',
+ 'test',
+ 'test',
+ crs,
+ shape[1],
+ shape[0],
+ extents,
+ )
+ return area_def
- for data_arr in ds.data_vars.values():
- if data_arr.ndim == 0:
- # grid mapping variable
- assert 'grid_mapping_name' in data_arr.attrs
- continue
- assert data_arr.encoding.get('zlib', False)
- assert 'grid_mapping' in data_arr.attrs
- assert data_arr.attrs['grid_mapping'] in ds
+def _get_test_data(shape=(200, 100), chunks=50):
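+    """Get a dask array of linearly increasing float32 values with the given shape and chunks."""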
+ data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape)
+ return da.from_array(data, chunks=chunks)
-class TestAWIPSTiledWriter:
- """Test basic functionality of AWIPS Tiled writer."""
- def setup_method(self):
- """Create temporary directory to save files to."""
- import tempfile
- self.base_dir = tempfile.mkdtemp()
+def _get_test_lcc_data(dask_arr, area_def, extra_attrs=None):
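+    """Wrap a dask array in a DataArray with the metadata expected by the AWIPS tiled writer."""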
+ attrs = dict(
+ name='test_ds',
+ platform_name='PLAT',
+ sensor='SENSOR',
+ units='1',
+ standard_name='toa_bidirectional_reflectance',
+ area=area_def,
+ start_time=START_TIME,
+ end_time=END_TIME
+ )
+ if extra_attrs:
+ attrs.update(extra_attrs)
+ ds = xr.DataArray(
+ dask_arr,
+ dims=('y', 'x') if dask_arr.ndim == 2 else ('bands', 'y', 'x'),
+ attrs=attrs,
+ )
+ return update_resampled_coords(ds, ds, area_def)
- def teardown_method(self):
- """Remove the temporary directory created for a test."""
- try:
- import shutil
- shutil.rmtree(self.base_dir, ignore_errors=True)
- except OSError:
- pass
- def test_init(self):
+class TestAWIPSTiledWriter:
+ """Test basic functionality of AWIPS Tiled writer."""
+
+ def test_init(self, tmp_path):
"""Test basic init method of writer."""
from satpy.writers.awips_tiled import AWIPSTiledWriter
- AWIPSTiledWriter(base_dir=self.base_dir)
-
- def _get_test_lcc_data(self):
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 100,
- 200,
- (-1000., -1500., 1000., 1500.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- data = np.linspace(0., 1., 20000, dtype=np.float32).reshape((200, 100))
- ds = DataArray(
- da.from_array(data, chunks=50),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
- return ds
+ AWIPSTiledWriter(base_dir=str(tmp_path))
@pytest.mark.parametrize('use_save_dataset',
[(False,), (True,)])
- def test_basic_numbered_1_tile(self, use_save_dataset):
+ @pytest.mark.parametrize(
+ ('extra_attrs', 'expected_filename'),
+ [
+ ({}, 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc'),
+ ({'sensor': 'viirs', 'name': 'I01'}, 'TESTS_AII_PLAT_viirs_I01_TEST_T001_20180101_1200.nc'),
+ ]
+ )
+ def test_basic_numbered_1_tile(self, extra_attrs, expected_filename, use_save_dataset, caplog, tmp_path):
"""Test creating a single numbered tile."""
- import xarray as xr
from satpy.writers.awips_tiled import AWIPSTiledWriter
- input_data_arr = self._get_test_lcc_data()
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- if use_save_dataset:
- w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS')
- else:
- w.save_datasets([input_data_arr], sector_id='TEST', source_name='TESTS')
-
- all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+ data = _get_test_data()
+ area_def = _get_test_area()
+ input_data_arr = _get_test_lcc_data(data, area_def, extra_attrs)
+ with caplog.at_level(logging.DEBUG):
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ if use_save_dataset:
+ w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS')
+ else:
+ w.save_datasets([input_data_arr], sector_id='TEST', source_name='TESTS')
+
+ assert "no routine matching" not in caplog.text
+ assert "Can't format string" not in caplog.text
+ all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
assert len(all_files) == 1
- assert os.path.basename(all_files[0]) == 'TESTS_AII_PLAT_SENSOR_test_ds_TEST_T001_20180101_1200.nc'
+ assert os.path.basename(all_files[0]) == expected_filename
for fn in all_files:
- output_ds = xr.open_dataset(fn, mask_and_scale=False)
- check_required_common_attributes(output_ds)
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
output_ds = xr.open_dataset(fn, mask_and_scale=True)
+ check_required_properties(unmasked_ds, output_ds)
scale_factor = output_ds['data'].encoding['scale_factor']
np.testing.assert_allclose(input_data_arr.values, output_ds['data'].data,
atol=scale_factor / 2)
+ def test_units_length_warning(self, tmp_path):
+ """Test long 'units' warnings are raised."""
+ from satpy.writers.awips_tiled import AWIPSTiledWriter
+ data = _get_test_data()
+ area_def = _get_test_area()
+ input_data_arr = _get_test_lcc_data(data, area_def)
+ input_data_arr.attrs["units"] = "this is a really long units string"
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ with pytest.warns(UserWarning, match=r'.*this is a really long units string.*too long.*'):
+ w.save_dataset(input_data_arr, sector_id='TEST', source_name='TESTS')
+
@pytest.mark.parametrize(
("tile_count", "tile_size"),
[
@@ -156,14 +214,14 @@ def test_basic_numbered_1_tile(self, use_save_dataset):
(None, None),
]
)
- def test_basic_numbered_tiles(self, tile_count, tile_size):
+ def test_basic_numbered_tiles(self, tile_count, tile_size, tmp_path):
"""Test creating a multiple numbered tiles."""
- import xarray as xr
- import dask
- from satpy.writers.awips_tiled import AWIPSTiledWriter
from satpy.tests.utils import CustomScheduler
- input_data_arr = self._get_test_lcc_data()
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
+ from satpy.writers.awips_tiled import AWIPSTiledWriter
+ data = _get_test_data()
+ area_def = _get_test_area()
+ input_data_arr = _get_test_lcc_data(data, area_def)
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
save_kwargs = dict(
sector_id='TEST',
source_name="TESTS",
@@ -180,97 +238,76 @@ def test_basic_numbered_tiles(self, tile_count, tile_size):
with dask.config.set(scheduler=CustomScheduler(1 * 2)): # precompute=*2
w.save_datasets([input_data_arr], **save_kwargs)
- all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+ all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
expected_num_files = 0 if should_error else 9
assert len(all_files) == expected_num_files
for fn in all_files:
- ds = xr.open_dataset(fn, mask_and_scale=False)
- check_required_common_attributes(ds)
- assert ds.attrs['my_global'] == 'TEST'
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
+ masked_ds = xr.open_dataset(fn, mask_and_scale=True)
+ check_required_properties(unmasked_ds, masked_ds)
+ assert unmasked_ds.attrs['my_global'] == 'TEST'
+ assert unmasked_ds.attrs['sector_id'] == 'TEST'
+ assert 'physical_element' in unmasked_ds.attrs
stime = input_data_arr.attrs['start_time']
- assert ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S')
+ assert unmasked_ds.attrs['start_date_time'] == stime.strftime('%Y-%m-%dT%H:%M:%S')
- def test_basic_lettered_tiles(self):
+ def test_basic_lettered_tiles(self, tmp_path):
"""Test creating a lettered grid."""
- import xarray as xr
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (-1000000., -1500000., 1000000., 1500000.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- ds = DataArray(
- da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ data = _get_test_data(shape=(2000, 1000), chunks=500)
+ area_def = _get_test_area(shape=(2000, 1000),
+ extents=(-1000000., -1500000., 1000000., 1500000.))
+ ds = _get_test_lcc_data(data, area_def)
# tile_count should be ignored since we specified lettered_grid
w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
- all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+ all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
assert len(all_files) == 16
for fn in all_files:
- ds = xr.open_dataset(fn, mask_and_scale=False)
- check_required_common_attributes(ds)
- assert ds.attrs['start_date_time'] == now.strftime('%Y-%m-%dT%H:%M:%S')
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
+ masked_ds = xr.open_dataset(fn, mask_and_scale=True)
+ check_required_properties(unmasked_ds, masked_ds)
+ assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S')
- def test_lettered_tiles_update_existing(self):
+ def test_basic_lettered_tiles_diff_projection(self, tmp_path):
+ """Test creating a lettered grid from data with differing projection.."""
+ from satpy.writers.awips_tiled import AWIPSTiledWriter
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ crs = CRS("+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. +lat_0=45 +lat_1=45 +units=m +no_defs")
+ data = _get_test_data(shape=(2000, 1000), chunks=500)
+ area_def = _get_test_area(shape=(2000, 1000), crs=crs,
+ extents=(-1000000., -1500000., 1000000., 1500000.))
+ ds = _get_test_lcc_data(data, area_def)
+ # tile_count should be ignored since we specified lettered_grid
+ w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
+ all_files = sorted(glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc')))
+ assert len(all_files) == 24
+ assert "TC02" in all_files[0] # the first tile should be TC02
+ for fn in all_files:
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
+ masked_ds = xr.open_dataset(fn, mask_and_scale=True)
+ check_required_properties(unmasked_ds, masked_ds)
+ assert masked_ds.attrs['start_date_time'] == START_TIME.strftime('%Y-%m-%dT%H:%M:%S')
+
+ def test_lettered_tiles_update_existing(self, tmp_path):
"""Test updating lettered tiles with additional data."""
- import shutil
- import xarray as xr
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- import dask
- first_base_dir = os.path.join(self.base_dir, 'first')
+ first_base_dir = os.path.join(str(tmp_path), 'first')
w = AWIPSTiledWriter(base_dir=first_base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (-1000000., -1500000., 1000000., 1500000.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- data = np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000))
+ shape = (2000, 1000)
+ data = np.linspace(0., 1., shape[0] * shape[1], dtype=np.float32).reshape(shape)
# pixels to be filled in later
data[:, -200:] = np.nan
- ds = DataArray(
- da.from_array(data, chunks=500),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ data = da.from_array(data, chunks=500)
+ area_def = _get_test_area(shape=(2000, 1000),
+ extents=(-1000000., -1500000., 1000000., 1500000.))
+ ds = _get_test_lcc_data(data, area_def)
# tile_count should be ignored since we specified lettered_grid
w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
all_files = sorted(glob(os.path.join(first_base_dir, 'TESTS_AII*.nc')))
assert len(all_files) == 16
first_files = []
- second_base_dir = os.path.join(self.base_dir, 'second')
+ second_base_dir = os.path.join(str(tmp_path), 'second')
os.makedirs(second_base_dir)
for fn in all_files:
new_fn = fn.replace(first_base_dir, second_base_dir)
@@ -279,32 +316,15 @@ def test_lettered_tiles_update_existing(self):
# Second writing/updating
# Area is about 100 pixels to the right
- area_def2 = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (-800000., -1500000., 1200000., 1500000.),
- )
+ area_def2 = _get_test_area(shape=(2000, 1000),
+ extents=(-800000., -1500000., 1200000., 1500000.))
data2 = np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000))
# a gap at the beginning where old values remain
data2[:, :200] = np.nan
# a gap at the end where old values remain
data2[:, -400:-300] = np.nan
- ds2 = DataArray(
- da.from_array(data2, chunks=500),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def2,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ data2 = da.from_array(data2, chunks=500)
+ ds2 = _get_test_lcc_data(data2, area_def2)
w = AWIPSTiledWriter(base_dir=second_base_dir, compress=True)
# HACK: The _copy_to_existing function hangs when opening the output
# file multiple times...sometimes. If we limit dask to one worker
@@ -336,142 +356,60 @@ def test_lettered_tiles_update_existing(self):
assert np.isnan(orig_data[:, 200:]).all()
assert not np.isnan(new_data[:, 200:]).all()
- def test_lettered_tiles_sector_ref(self):
+ def test_lettered_tiles_sector_ref(self, tmp_path):
"""Test creating a lettered grid using the sector as reference."""
- import xarray as xr
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (-1000000., -1500000., 1000000., 1500000.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- ds = DataArray(
- da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ data = _get_test_data(shape=(2000, 1000), chunks=500)
+ area_def = _get_test_area(shape=(2000, 1000),
+ extents=(-1000000., -1500000., 1000000., 1500000.))
+ ds = _get_test_lcc_data(data, area_def)
w.save_datasets([ds], sector_id='LCC', source_name="TESTS",
lettered_grid=True, use_sector_reference=True,
use_end_time=True)
- all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+ all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
assert len(all_files) == 16
for fn in all_files:
- ds = xr.open_dataset(fn, mask_and_scale=False)
- check_required_common_attributes(ds)
- assert ds.attrs['start_date_time'] == (now + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S')
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
+ masked_ds = xr.open_dataset(fn, mask_and_scale=True)
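+    # check_required_properties presumably validates packing attrs on the raw view and decoded values on the scaled one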
+ check_required_properties(unmasked_ds, masked_ds)
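+    # with use_end_time=True, the file's start_date_time reflects the dataset's end time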
+ expected_start = (START_TIME + timedelta(minutes=20)).strftime('%Y-%m-%dT%H:%M:%S')
+ assert masked_ds.attrs['start_date_time'] == expected_start
- def test_lettered_tiles_no_fit(self):
+ def test_lettered_tiles_no_fit(self, tmp_path):
"""Test creating a lettered grid with no data overlapping the grid."""
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (4000000., 5000000., 5000000., 6000000.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- ds = DataArray(
- da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ data = _get_test_data(shape=(2000, 1000), chunks=500)
+ area_def = _get_test_area(shape=(2000, 1000),
+ extents=(4000000., 5000000., 5000000., 6000000.))
+ ds = _get_test_lcc_data(data, area_def)
w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
# No files created
- all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+ all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
assert not all_files
- def test_lettered_tiles_no_valid_data(self):
+ def test_lettered_tiles_no_valid_data(self, tmp_path):
"""Test creating a lettered grid with no valid data."""
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (-1000000., -1500000., 1000000., 1500000.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- ds = DataArray(
- da.full((2000, 1000), np.nan, chunks=500, dtype=np.float32),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ data = da.full((2000, 1000), np.nan, chunks=500, dtype=np.float32)
+ area_def = _get_test_area(shape=(2000, 1000),
+ extents=(-1000000., -1500000., 1000000., 1500000.))
+ ds = _get_test_lcc_data(data, area_def)
w.save_datasets([ds], sector_id='LCC', source_name="TESTS", tile_count=(3, 3), lettered_grid=True)
# No files created - all NaNs should result in no tiles being created
- all_files = glob(os.path.join(self.base_dir, 'TESTS_AII*.nc'))
+ all_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*.nc'))
assert not all_files
- def test_lettered_tiles_bad_filename(self):
+ def test_lettered_tiles_bad_filename(self, tmp_path):
"""Test creating a lettered grid with a bad filename."""
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True, filename="{Bad Key}.nc")
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- ('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 1000,
- 2000,
- (-1000000., -1500000., 1000000., 1500000.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- ds = DataArray(
- da.from_array(np.linspace(0., 1., 2000000, dtype=np.float32).reshape((2000, 1000)), chunks=500),
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True, filename="{Bad Key}.nc")
+ data = _get_test_data(shape=(2000, 1000), chunks=500)
+ area_def = _get_test_area(shape=(2000, 1000),
+ extents=(-1000000., -1500000., 1000000., 1500000.))
+ ds = _get_test_lcc_data(data, area_def)
with pytest.raises(KeyError):
w.save_datasets([ds],
sector_id='LCC',
@@ -479,50 +417,30 @@ def test_lettered_tiles_bad_filename(self):
tile_count=(3, 3),
lettered_grid=True)
- def test_basic_numbered_tiles_rgb(self):
+ def test_basic_numbered_tiles_rgb(self, tmp_path):
"""Test creating a multiple numbered tiles with RGB."""
from satpy.writers.awips_tiled import AWIPSTiledWriter
- import xarray as xr
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- ('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 100,
- 200,
- (-1000., -1500., 1000., 1500.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- ds = DataArray(
- da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50),
- dims=('bands', 'y', 'x'),
- coords={'bands': ['R', 'G', 'B']},
- attrs=dict(
- name='test_ds',
- platform_name='PLAT',
- sensor='SENSOR',
- units='1',
- area=area_def,
- start_time=now,
- end_time=now + timedelta(minutes=20))
- )
+ w = AWIPSTiledWriter(base_dir=str(tmp_path), compress=True)
+ data = da.from_array(np.linspace(0., 1., 60000, dtype=np.float32).reshape((3, 200, 100)), chunks=50)
+ area_def = _get_test_area()
+ ds = _get_test_lcc_data(data, area_def)
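+    # relabel dims and attach band coords so the writer emits separate R/G/B tile files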
+ ds = ds.rename(dict(zip(ds.dims, ['bands', 'y', 'x'])))
+ ds.coords['bands'] = ['R', 'G', 'B']
+
w.save_datasets([ds], sector_id='TEST', source_name="TESTS", tile_count=(3, 3))
- chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_R*.nc'))
+ chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_R*.nc'))
all_files = chan_files[:]
assert len(chan_files) == 9
- chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_G*.nc'))
+ chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_G*.nc'))
all_files.extend(chan_files)
assert len(chan_files) == 9
- chan_files = glob(os.path.join(self.base_dir, 'TESTS_AII*test_ds_B*.nc'))
+ chan_files = glob(os.path.join(str(tmp_path), 'TESTS_AII*test_ds_B*.nc'))
assert len(chan_files) == 9
all_files.extend(chan_files)
for fn in all_files:
- ds = xr.open_dataset(fn, mask_and_scale=False)
- check_required_common_attributes(ds)
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
+ masked_ds = xr.open_dataset(fn, mask_and_scale=True)
+ check_required_properties(unmasked_ds, masked_ds)
@pytest.mark.parametrize(
"sector",
@@ -537,38 +455,20 @@ def test_basic_numbered_tiles_rgb(self):
{'environment_prefix': 'BB', 'filename': '{environment_prefix}_{name}_GLM_T{tile_number:04d}.nc'},
]
)
- def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs):
+ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs, tmp_path):
"""Test creating a tiles with multiple variables."""
- import xarray as xr
from satpy.writers.awips_tiled import AWIPSTiledWriter
- from xarray import DataArray
- from pyresample.geometry import AreaDefinition
- from pyresample.utils import proj4_str_to_dict
- import os
os.environ['ORGANIZATION'] = '1' * 50
- w = AWIPSTiledWriter(base_dir=self.base_dir, compress=True)
- area_def = AreaDefinition(
- 'test',
- 'test',
- 'test',
- proj4_str_to_dict('+proj=lcc +datum=WGS84 +ellps=WGS84 +lon_0=-95. '
- '+lat_0=25 +lat_1=25 +units=m +no_defs'),
- 100,
- 200,
- (-1000., -1500., 1000., 1500.),
- )
- now = datetime(2018, 1, 1, 12, 0, 0)
- end_time = now + timedelta(minutes=20)
- ds1 = DataArray(
- da.from_array(np.linspace(0., 1., 20000, dtype=np.float32).reshape((200, 100)), chunks=50),
- attrs=dict(
+ w = AWIPSTiledWriter(base_dir=tmp_path, compress=True)
+ data = _get_test_data()
+ area_def = _get_test_area()
+ ds1 = _get_test_lcc_data(data, area_def)
+ ds1.attrs.update(
+ dict(
name='total_energy',
platform_name='GOES-17',
sensor='SENSOR',
units='1',
- area=area_def,
- start_time=now,
- end_time=end_time,
scan_mode='M3',
scene_abbr=sector,
platform_shortname="G17"
@@ -594,15 +494,16 @@ def test_multivar_numbered_tiles_glm(self, sector, extra_kwargs):
tile_count=(3, 3), template='glm_l2_rad{}'.format(sector.lower()),
**extra_kwargs)
fn_glob = self._get_glm_glob_filename(extra_kwargs)
- all_files = glob(os.path.join(self.base_dir, fn_glob))
+ all_files = glob(os.path.join(str(tmp_path), fn_glob))
assert len(all_files) == 9
for fn in all_files:
- ds = xr.open_dataset(fn, mask_and_scale=False)
- check_required_common_attributes(ds)
+ unmasked_ds = xr.open_dataset(fn, mask_and_scale=False)
+ masked_ds = xr.open_dataset(fn, mask_and_scale=True)
+ check_required_properties(unmasked_ds, masked_ds)
if sector == 'C':
- assert ds.attrs['time_coverage_end'] == end_time.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
+ assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
else: # 'F'
- assert ds.attrs['time_coverage_end'] == end_time.strftime('%Y-%m-%dT%H:%M:%SZ')
+ assert masked_ds.attrs['time_coverage_end'] == END_TIME.strftime('%Y-%m-%dT%H:%M:%SZ')
@staticmethod
def _get_glm_glob_filename(extra_kwargs):
diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index 8c6bced030..95399c4a55 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -17,16 +17,18 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the CF writer."""
-from collections import OrderedDict
+import logging
import os
+import tempfile
import unittest
-from unittest import mock
+from collections import OrderedDict
from datetime import datetime
-import tempfile
-from satpy.tests.utils import make_dsq
+from unittest import mock
import numpy as np
+from satpy.tests.utils import make_dsq
+
try:
from pyproj import CRS
except ImportError:
@@ -57,14 +59,15 @@ class TestCFWriter(unittest.TestCase):
def test_init(self):
"""Test initializing the CFWriter class."""
- from satpy.writers.cf_writer import CFWriter
from satpy.writers import configs_for_writer
+ from satpy.writers.cf_writer import CFWriter
CFWriter(config_files=list(configs_for_writer('cf'))[0])
def test_save_array(self):
"""Test saving an array to netcdf/cf."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -82,8 +85,9 @@ def test_save_array(self):
def test_save_with_compression(self):
"""Test saving an array with compression."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -101,9 +105,10 @@ def test_save_with_compression(self):
def test_save_array_coords(self):
"""Test saving array with coordinates."""
- from satpy import Scene
- import xarray as xr
import numpy as np
+ import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -137,8 +142,9 @@ def test_save_array_coords(self):
def test_save_dataset_a_digit(self):
"""Test saving an array to netcdf/cf where dataset name starting with a digit."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
scn['1'] = xr.DataArray([1, 2, 3])
with TempFile() as filename:
@@ -148,8 +154,9 @@ def test_save_dataset_a_digit(self):
def test_save_dataset_a_digit_prefix(self):
"""Test saving an array to netcdf/cf where dataset name starting with a digit with prefix."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
scn['1'] = xr.DataArray([1, 2, 3])
with TempFile() as filename:
@@ -159,8 +166,9 @@ def test_save_dataset_a_digit_prefix(self):
def test_save_dataset_a_digit_prefix_include_attr(self):
"""Test saving an array to netcdf/cf where dataset name starting with a digit with prefix include orig name."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
scn['1'] = xr.DataArray([1, 2, 3])
with TempFile() as filename:
@@ -171,8 +179,9 @@ def test_save_dataset_a_digit_prefix_include_attr(self):
def test_save_dataset_a_digit_no_prefix_include_attr(self):
"""Test saving an array to netcdf/cf dataset name starting with a digit with no prefix include orig name."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
scn['1'] = xr.DataArray([1, 2, 3])
with TempFile() as filename:
@@ -184,6 +193,7 @@ def test_save_dataset_a_digit_no_prefix_include_attr(self):
def test_ancillary_variables(self):
"""Test ancillary_variables cited each other."""
import xarray as xr
+
from satpy import Scene
from satpy.tests.utils import make_dataid
scn = Scene()
@@ -208,6 +218,7 @@ def test_ancillary_variables(self):
def test_groups(self):
"""Test creating a file with groups."""
import xarray as xr
+
from satpy import Scene
tstart = datetime(2019, 4, 1, 12, 0)
@@ -264,8 +275,9 @@ def test_groups(self):
def test_single_time_value(self):
"""Test setting a single time value."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -282,10 +294,29 @@ def test_single_time_value(self):
bounds_exp = np.array([[start_time, end_time]], dtype='datetime64[m]')
np.testing.assert_array_equal(f['time_bnds'], bounds_exp)
+ def test_time_coordinate_on_a_swath(self):
+ """Test that time dimension is not added on swath data with time already as a coordinate."""
+ import xarray as xr
+
+ from satpy import Scene
+ scn = Scene()
+ test_array = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
+ times = np.array(['2018-05-30T10:05:00', '2018-05-30T10:05:01',
+ '2018-05-30T10:05:02', '2018-05-30T10:05:03'], dtype=np.datetime64)
+ scn['test-array'] = xr.DataArray(test_array,
+ dims=['y', 'x'],
+ coords={'time': ('y', times)},
+ attrs=dict(start_time=times[0], end_time=times[-1]))
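+    # 'time' here is a coordinate along y, not a dimension; the writer must not add a time dimension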
+ with TempFile() as filename:
+ scn.save_datasets(filename=filename, writer='cf', pretty=True)
+ with xr.open_dataset(filename, decode_cf=True) as f:
+ np.testing.assert_array_equal(f['time'], scn['test-array']['time'])
+
def test_bounds(self):
"""Test setting time bounds."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -317,8 +348,9 @@ def test_bounds(self):
def test_bounds_minimum(self):
"""Test minimum bounds."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_timeA = datetime(2018, 5, 30, 10, 0) # expected to be used
end_timeA = datetime(2018, 5, 30, 10, 20)
@@ -344,8 +376,9 @@ def test_bounds_minimum(self):
def test_bounds_missing_time_info(self):
"""Test time bounds generation in case of missing time."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_timeA = datetime(2018, 5, 30, 10, 0)
end_timeA = datetime(2018, 5, 30, 10, 15)
@@ -367,8 +400,9 @@ def test_bounds_missing_time_info(self):
def test_encoding_kwarg(self):
"""Test 'encoding' keyword argument."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -390,8 +424,9 @@ def test_encoding_kwarg(self):
def test_unlimited_dims_kwarg(self):
"""Test specification of unlimited dimensions."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -408,8 +443,9 @@ def test_unlimited_dims_kwarg(self):
def test_header_attrs(self):
"""Check global attributes are set."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -542,9 +578,10 @@ def assertDictWithArraysEqual(self, d1, d2):
def test_encode_attrs_nc(self):
"""Test attributes encoding."""
- from satpy.writers.cf_writer import encode_attrs_nc
import json
+ from satpy.writers.cf_writer import encode_attrs_nc
+
attrs, expected, _ = self.get_test_attrs()
# Test encoding
@@ -562,9 +599,10 @@ def test_encode_attrs_nc(self):
def test_da2cf(self):
"""Test the conversion of a DataArray to a CF-compatible DataArray."""
- from satpy.writers.cf_writer import CFWriter
import xarray as xr
+ from satpy.writers.cf_writer import CFWriter
+
# Create set of test attributes
attrs, attrs_expected, attrs_expected_flat = self.get_test_attrs()
attrs['area'] = 'some_area'
@@ -602,9 +640,10 @@ def test_da2cf(self):
@mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None)
def test_collect_datasets(self, *mocks):
"""Test collecting CF datasets from a DataArray objects."""
- from satpy.writers.cf_writer import CFWriter
- import xarray as xr
import pyresample.geometry
+ import xarray as xr
+
+ from satpy.writers.cf_writer import CFWriter
geos = pyresample.geometry.AreaDefinition(
area_id='geos',
description='geos',
@@ -648,6 +687,7 @@ def test_collect_datasets(self, *mocks):
def test_assert_xy_unique(self):
"""Test that the x and y coordinates are unique."""
import xarray as xr
+
from satpy.writers.cf_writer import assert_xy_unique
dummy = [[1, 2], [3, 4]]
@@ -661,9 +701,10 @@ def test_assert_xy_unique(self):
def test_link_coords(self):
"""Check that coordinates link has been established correctly."""
+ import numpy as np
import xarray as xr
+
from satpy.writers.cf_writer import link_coords
- import numpy as np
data = [[1, 2], [3, 4]]
lon = np.zeros((2, 2))
@@ -699,6 +740,7 @@ def test_link_coords(self):
def test_make_alt_coords_unique(self):
"""Test that created coordinate variables are unique."""
import xarray as xr
+
from satpy.writers.cf_writer import make_alt_coords_unique
data = [[1, 2], [3, 4]]
@@ -745,8 +787,9 @@ def test_make_alt_coords_unique(self):
def test_area2cf(self):
"""Test the conversion of an area to CF standards."""
- import xarray as xr
import pyresample.geometry
+ import xarray as xr
+
from satpy.writers.cf_writer import area2cf
ds_base = xr.DataArray(data=[[1, 2], [3, 4]], dims=('y', 'x'), coords={'y': [1, 2], 'x': [3, 4]},
@@ -793,8 +836,9 @@ def test_area2cf(self):
def test_area2gridmapping(self):
"""Test the conversion from pyresample area object to CF grid mapping."""
- import xarray as xr
import pyresample.geometry
+ import xarray as xr
+
from satpy.writers.cf_writer import area2gridmapping
def _gm_matches(gmapping, expected):
@@ -979,9 +1023,10 @@ def _gm_matches(gmapping, expected):
def test_area2lonlat(self):
"""Test the conversion from areas to lon/lat."""
+ import dask.array as da
import pyresample.geometry
import xarray as xr
- import dask.array as da
+
from satpy.writers.cf_writer import area2lonlat
area = pyresample.geometry.AreaDefinition(
@@ -1032,9 +1077,10 @@ def test_area2lonlat(self):
def test_load_module_with_old_pyproj(self):
"""Test that cf_writer can still be loaded with pyproj 1.9.6."""
- import pyproj # noqa 401
- import sys
import importlib
+ import sys
+
+ import pyproj # noqa 401
old_version = sys.modules['pyproj'].__version__
sys.modules['pyproj'].__version__ = "1.9.6"
try:
@@ -1046,8 +1092,9 @@ def test_load_module_with_old_pyproj(self):
def test_global_attr_default_history_and_Conventions(self):
"""Test saving global attributes history and Conventions."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -1064,8 +1111,9 @@ def test_global_attr_default_history_and_Conventions(self):
def test_global_attr_history_and_Conventions(self):
"""Test saving global attributes history and Conventions."""
- from satpy import Scene
import xarray as xr
+
+ from satpy import Scene
scn = Scene()
start_time = datetime(2018, 5, 30, 10, 0)
end_time = datetime(2018, 5, 30, 10, 15)
@@ -1075,7 +1123,7 @@ def test_global_attr_history_and_Conventions(self):
end_time=end_time,
prerequisites=[make_dsq(name='hej')]))
header_attrs = {}
- header_attrs['history'] = 'TEST add history',
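+    # deliberately a tuple: the old trailing comma created one by accident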
+ header_attrs['history'] = ('TEST add history',)
header_attrs['Conventions'] = 'CF-1.7, ACDD-1.3'
with TempFile() as filename:
scn.save_datasets(filename=filename, writer='cf', header_attrs=header_attrs)
@@ -1085,13 +1133,86 @@ def test_global_attr_history_and_Conventions(self):
self.assertIn('Created by pytroll/satpy on', f.attrs['history'])
+def test_lonlat_storage(tmp_path):
+ """Test correct storage for area with lon/lat units."""
+ import xarray as xr
+ from pyresample import create_area_def
+
+ from ..utils import make_fake_scene
+ scn = make_fake_scene(
+ {"ketolysis": np.arange(25).reshape(5, 5)},
+ daskify=True,
+ area=create_area_def("mavas", 4326, shape=(5, 5),
+ center=(0, 0), resolution=(1, 1)))
+
+ filename = os.fspath(tmp_path / "test.nc")
+ scn.save_datasets(filename=filename, writer="cf", include_lonlats=False)
+ with xr.open_dataset(filename) as ds:
+ assert ds["ketolysis"].attrs["grid_mapping"] == "mavas"
+ assert ds["mavas"].attrs["grid_mapping_name"] == "latitude_longitude"
+ assert ds["x"].attrs["units"] == "degrees_east"
+ assert ds["y"].attrs["units"] == "degrees_north"
+ assert ds["mavas"].attrs["longitude_of_prime_meridian"] == 0.0
+ np.testing.assert_allclose(ds["mavas"].attrs["semi_major_axis"], 6378137.0)
+ np.testing.assert_allclose(ds["mavas"].attrs["inverse_flattening"], 298.257223563)
+
+
+def test_da2cf_lonlat():
+ """Test correct da2cf encoding for area with lon/lat units."""
+ import xarray as xr
+ from pyresample import create_area_def
+
+ from satpy.resample import add_crs_xy_coords
+ from satpy.writers.cf_writer import CFWriter
+
+ area = create_area_def("mavas", 4326, shape=(5, 5),
+ center=(0, 0), resolution=(1, 1))
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"),
+ attrs={"area": area})
+ da = add_crs_xy_coords(da, area)
+ new_da = CFWriter.da2cf(da)
+ assert new_da["x"].attrs["units"] == "degrees_east"
+ assert new_da["y"].attrs["units"] == "degrees_north"
+
+
+def test_is_projected(caplog):
+ """Tests for private _is_projected function."""
+ import xarray as xr
+
+ from satpy.writers.cf_writer import CFWriter
+
+ # test case with units but no area
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"),
+ coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "m"}),
+ "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "m"})})
+ assert CFWriter._is_projected(da)
+
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"),
+ coords={"x": xr.DataArray(np.arange(5), dims=("x",), attrs={"units": "degrees_east"}),
+ "y": xr.DataArray(np.arange(5), dims=("y",), attrs={"units": "degrees_north"})})
+ assert not CFWriter._is_projected(da)
+
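+    # with no coordinate metadata at all, _is_projected warns and falls back to assuming projected data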
+ da = xr.DataArray(
+ np.arange(25).reshape(5, 5),
+ dims=("y", "x"))
+ with caplog.at_level(logging.WARNING):
+ assert CFWriter._is_projected(da)
+ assert "Failed to tell if data are projected." in caplog.text
+
+
class TestCFWriterData(unittest.TestCase):
"""Test case for CF writer where data arrays are needed."""
def setUp(self):
"""Create some test data."""
- import xarray as xr
import pyresample.geometry
+ import xarray as xr
data = [[75, 2], [3, 4]]
y = [1, 2]
x = [1, 2]
@@ -1140,8 +1261,9 @@ def test_has_projection_coords(self):
@mock.patch('satpy.writers.cf_writer.CFWriter.__init__', return_value=None)
def test_collect_datasets_with_latitude_named_lat(self, *mocks):
"""Test collecting CF datasets with latitude named lat."""
- from satpy.writers.cf_writer import CFWriter
from operator import getitem
+
+ from satpy.writers.cf_writer import CFWriter
self.datasets_list = [self.datasets[key] for key in self.datasets]
self.datasets_list_no_latlon = [self.datasets[key] for key in ['var1', 'var2']]
diff --git a/satpy/tests/writer_tests/test_geotiff.py b/satpy/tests/writer_tests/test_geotiff.py
index 99494e8287..47cab92047 100644
--- a/satpy/tests/writer_tests/test_geotiff.py
+++ b/satpy/tests/writer_tests/test_geotiff.py
@@ -17,140 +17,194 @@
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Tests for the geotiff writer."""
-import unittest
+from datetime import datetime
from unittest import mock
-import numpy as np
-
-class TestGeoTIFFWriter(unittest.TestCase):
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+
+
+def _get_test_datasets_2d():
+ """Create a single 2D test dataset."""
+ ds1 = xr.DataArray(
+ da.zeros((100, 200), chunks=50),
+ dims=('y', 'x'),
+ attrs={'name': 'test',
+ 'start_time': datetime.utcnow()}
+ )
+ return [ds1]
+
+
+def _get_test_datasets_2d_nonlinear_enhancement():
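+    """Create a 2D test dataset with a nonlinear (gamma) enhancement history."""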
+ data_arrays = _get_test_datasets_2d()
+ enh_history = [
+ {"gamma": 2.0},
+ ]
+ for data_arr in data_arrays:
+ data_arr.attrs["enhancement_history"] = enh_history
+ return data_arrays
+
+
+def _get_test_datasets_3d():
+ """Create a single 3D test dataset."""
+ ds1 = xr.DataArray(
+ da.zeros((3, 100, 200), chunks=50),
+ dims=('bands', 'y', 'x'),
+ coords={'bands': ['R', 'G', 'B']},
+ attrs={'name': 'test',
+ 'start_time': datetime.utcnow()}
+ )
+ return [ds1]
+
+
+class TestGeoTIFFWriter:
"""Test the GeoTIFF Writer class."""
- def setUp(self):
- """Create temporary directory to save files to."""
- import tempfile
- self.base_dir = tempfile.mkdtemp()
-
- def tearDown(self):
- """Remove the temporary directory created for a test."""
- try:
- import shutil
- shutil.rmtree(self.base_dir, ignore_errors=True)
- except OSError:
- pass
-
- def _get_test_datasets(self):
- """Create a single test dataset."""
- import xarray as xr
- import dask.array as da
- from datetime import datetime
- ds1 = xr.DataArray(
- da.zeros((100, 200), chunks=50),
- dims=('y', 'x'),
- attrs={'name': 'test',
- 'start_time': datetime.utcnow()}
- )
- return [ds1]
-
def test_init(self):
"""Test creating the writer with no arguments."""
from satpy.writers.geotiff import GeoTIFFWriter
GeoTIFFWriter()
- def test_simple_write(self):
+ @pytest.mark.parametrize(
+ "input_func",
+ [
+ _get_test_datasets_2d,
+ _get_test_datasets_3d
+ ]
+ )
+ def test_simple_write(self, input_func, tmp_path):
"""Test basic writer operation."""
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(base_dir=self.base_dir)
+ datasets = input_func()
+ w = GeoTIFFWriter(base_dir=tmp_path)
w.save_datasets(datasets)
- def test_simple_delayed_write(self):
+ def test_simple_delayed_write(self, tmp_path):
"""Test writing can be delayed."""
- import dask.array as da
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(base_dir=self.base_dir)
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path)
# when we switch to rio_save on XRImage then this will be sources
# and targets
res = w.save_datasets(datasets, compute=False)
# this will fail if rasterio isn't installed
- self.assertIsInstance(res, tuple)
+ assert isinstance(res, tuple)
# two lists, sources and destinations
- self.assertEqual(len(res), 2)
- self.assertIsInstance(res[0], list)
- self.assertIsInstance(res[1], list)
- self.assertIsInstance(res[0][0], da.Array)
+ assert len(res) == 2
+ assert isinstance(res[0], list)
+ assert isinstance(res[1], list)
+ assert isinstance(res[0][0], da.Array)
da.store(res[0], res[1])
for target in res[1]:
if hasattr(target, 'close'):
target.close()
- def test_colormap_write(self):
+ def test_colormap_write(self, tmp_path):
"""Test writing an image with a colormap."""
- from satpy.writers.geotiff import GeoTIFFWriter
- from trollimage.xrimage import XRImage
from trollimage.colormap import spectral
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(base_dir=self.base_dir)
+ from trollimage.xrimage import XRImage
+
+ from satpy.writers.geotiff import GeoTIFFWriter
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path)
# we'd have to customize enhancements to test this through
# save_datasets. We'll use `save_image` as a workaround.
img = XRImage(datasets[0])
img.palettize(spectral)
w.save_image(img, keep_palette=True)
- def test_float_write(self):
+ def test_float_write(self, tmp_path):
"""Test that geotiffs can be written as floats.
NOTE: Does not actually check that the output is floats.
"""
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(base_dir=self.base_dir,
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path,
enhance=False,
dtype=np.float32)
w.save_datasets(datasets)
- def test_dtype_for_enhance_false(self):
- """Test that dtype of dataset is used if enhance=False."""
+ def test_dtype_for_enhance_false(self, tmp_path):
+ """Test that dtype of dataset is used if parameters enhance=False and dtype=None."""
+ from satpy.writers.geotiff import GeoTIFFWriter
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path, enhance=False)
+ with mock.patch('satpy.writers.XRImage.save') as save_method:
+ save_method.return_value = None
+ w.save_datasets(datasets, compute=False)
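+            # with enhance=False and no dtype given, the writer falls back to the data's own dtype (float64 here)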
+ assert save_method.call_args[1]['dtype'] == np.float64
+
+ def test_dtype_for_enhance_false_and_given_dtype(self, tmp_path):
+ """Test that dtype of dataset is used if enhance=False and dtype=uint8."""
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(base_dir=self.base_dir, enhance=False)
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path, enhance=False, dtype=np.uint8)
with mock.patch('satpy.writers.XRImage.save') as save_method:
save_method.return_value = None
w.save_datasets(datasets, compute=False)
- self.assertEqual(save_method.call_args[1]['dtype'], np.float64)
+ assert save_method.call_args[1]['dtype'] == np.uint8
- def test_fill_value_from_config(self):
+ def test_fill_value_from_config(self, tmp_path):
"""Test fill_value coming from the writer config."""
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(base_dir=self.base_dir)
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path)
w.info['fill_value'] = 128
with mock.patch('satpy.writers.XRImage.save') as save_method:
save_method.return_value = None
w.save_datasets(datasets, compute=False)
- self.assertEqual(save_method.call_args[1]['fill_value'], 128)
+ assert save_method.call_args[1]['fill_value'] == 128
- def test_tags(self):
+ def test_tags(self, tmp_path):
"""Test tags being added."""
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir)
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path)
w.info['fill_value'] = 128
with mock.patch('satpy.writers.XRImage.save') as save_method:
save_method.return_value = None
w.save_datasets(datasets, tags={'test2': 2}, compute=False)
called_tags = save_method.call_args[1]['tags']
- self.assertDictEqual(called_tags, {'test1': 1, 'test2': 2})
-
- def test_scale_offset(self):
+ assert called_tags == {'test1': 1, 'test2': 2}
+
+ @pytest.mark.parametrize(
+ "input_func",
+ [
+ _get_test_datasets_2d,
+ _get_test_datasets_3d,
+ _get_test_datasets_2d_nonlinear_enhancement,
+ ]
+ )
+ @pytest.mark.parametrize(
+ "save_kwargs",
+ [
+ {"include_scale_offset": True},
+ {"scale_offset_tags": ("scale", "offset")},
+ ]
+ )
+ def test_scale_offset(self, input_func, save_kwargs, tmp_path):
"""Test tags being added."""
from satpy.writers.geotiff import GeoTIFFWriter
- datasets = self._get_test_datasets()
- w = GeoTIFFWriter(tags={'test1': 1}, base_dir=self.base_dir)
+ datasets = input_func()
+ w = GeoTIFFWriter(tags={'test1': 1}, base_dir=tmp_path)
w.info['fill_value'] = 128
with mock.patch('satpy.writers.XRImage.save') as save_method:
save_method.return_value = None
- w.save_datasets(datasets, tags={'test2': 2}, compute=False, include_scale_offset=True)
- called_include = save_method.call_args[1]['include_scale_offset_tags']
- self.assertTrue(called_include)
+ w.save_datasets(datasets, tags={'test2': 2}, compute=False, **save_kwargs)
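+        # the boolean option is forwarded to save() as include_scale_offset_tags; the tuple passes through as scale_offset_tags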
+ kwarg_name = "include_scale_offset_tags" if "include_scale_offset" in save_kwargs else "scale_offset_tags"
+ kwarg_value = save_method.call_args[1].get(kwarg_name)
+ assert kwarg_value is not None
+
+ def test_tiled_value_from_config(self, tmp_path):
+ """Test tiled value coming from the writer config."""
+ from satpy.writers.geotiff import GeoTIFFWriter
+ datasets = _get_test_datasets_2d()
+ w = GeoTIFFWriter(base_dir=tmp_path)
+ with mock.patch('satpy.writers.XRImage.save') as save_method:
+ save_method.return_value = None
+ w.save_datasets(datasets, compute=False)
+ assert save_method.call_args[1]['tiled']
diff --git a/satpy/tests/writer_tests/test_mitiff.py b/satpy/tests/writer_tests/test_mitiff.py
index 4d1fcff29f..498ed0e5a3 100644
--- a/satpy/tests/writer_tests/test_mitiff.py
+++ b/satpy/tests/writer_tests/test_mitiff.py
@@ -41,9 +41,10 @@ def tearDown(self):
def _get_test_datasets(self):
"""Create a datasets list."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
area_def = AreaDefinition(
@@ -107,9 +108,10 @@ def _get_test_datasets(self):
def _get_test_datasets_sensor_set(self):
"""Create a datasets list."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
area_def = AreaDefinition(
@@ -173,9 +175,10 @@ def _get_test_datasets_sensor_set(self):
def _get_test_dataset(self, bands=3):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
area_def = AreaDefinition(
@@ -203,9 +206,10 @@ def _get_test_dataset(self, bands=3):
def _get_test_one_dataset(self):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
area_def = AreaDefinition(
@@ -233,9 +237,10 @@ def _get_test_one_dataset(self):
def _get_test_one_dataset_sensor_set(self):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
area_def = AreaDefinition(
@@ -263,9 +268,10 @@ def _get_test_one_dataset_sensor_set(self):
def _get_test_dataset_with_bad_values(self, bands=3):
"""Create a single test dataset."""
- import xarray as xr
- import numpy as np
from datetime import datetime
+
+ import numpy as np
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
area_def = AreaDefinition(
@@ -297,13 +303,15 @@ def _get_test_dataset_with_bad_values(self, bands=3):
def _get_test_dataset_calibration(self, bands=6):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
- from satpy.tests.utils import make_dsq
+
from satpy.scene import Scene
+ from satpy.tests.utils import make_dsq
area_def = AreaDefinition(
'test',
'test',
@@ -400,13 +408,15 @@ def _get_test_dataset_calibration(self, bands=6):
def _get_test_dataset_calibration_one_dataset(self, bands=1):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
- from satpy.tests.utils import make_dsq
+
from satpy.scene import Scene
+ from satpy.tests.utils import make_dsq
area_def = AreaDefinition(
'test',
'test',
@@ -453,11 +463,13 @@ def _get_test_dataset_calibration_one_dataset(self, bands=1):
def _get_test_dataset_three_bands_two_prereq(self, bands=3):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
+
from satpy.tests.utils import make_dsq
area_def = AreaDefinition(
'test',
@@ -486,11 +498,13 @@ def _get_test_dataset_three_bands_two_prereq(self, bands=3):
def _get_test_dataset_three_bands_prereq(self, bands=3):
"""Create a single test dataset."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
from pyresample.geometry import AreaDefinition
from pyresample.utils import proj4_str_to_dict
+
from satpy.tests.utils import make_dsq
area_def = AreaDefinition(
'test',
@@ -532,8 +546,10 @@ def test_simple_write(self):
def test_save_datasets(self):
"""Test basic writer operation save_datasets."""
import os
+
import numpy as np
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
expected = np.full((100, 200), 0)
dataset = self._get_test_datasets()
@@ -548,8 +564,10 @@ def test_save_datasets(self):
def test_save_datasets_sensor_set(self):
"""Test basic writer operation save_datasets."""
import os
+
import numpy as np
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
expected = np.full((100, 200), 0)
dataset = self._get_test_datasets_sensor_set()
@@ -564,7 +582,9 @@ def test_save_datasets_sensor_set(self):
def test_save_one_dataset(self):
"""Test basic writer operation with one dataset ie. no bands."""
import os
+
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
dataset = self._get_test_one_dataset()
w = MITIFFWriter(base_dir=self.base_dir)
@@ -579,7 +599,9 @@ def test_save_one_dataset(self):
def test_save_one_dataset_sesnor_set(self):
"""Test basic writer operation with one dataset ie. no bands."""
import os
+
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
dataset = self._get_test_one_dataset_sensor_set()
w = MITIFFWriter(base_dir=self.base_dir)
@@ -594,8 +616,10 @@ def test_save_one_dataset_sesnor_set(self):
def test_save_dataset_with_calibration(self):
"""Test writer operation with calibration."""
import os
+
import numpy as np
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
expected_ir = np.full((100, 200), 255)
@@ -767,8 +791,10 @@ def test_save_dataset_with_calibration(self):
def test_save_dataset_with_calibration_one_dataset(self):
"""Test saving if mitiff as dataset with only one channel."""
import os
+
import numpy as np
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
expected = np.full((100, 200), 255)
@@ -819,8 +845,10 @@ def test_save_dataset_with_calibration_one_dataset(self):
def test_save_dataset_with_bad_value(self):
"""Test writer operation with bad values."""
import os
+
import numpy as np
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
expected = np.array([[0, 4, 1, 37, 73],
@@ -837,10 +865,11 @@ def test_save_dataset_with_bad_value(self):
def test_convert_proj4_string(self):
"""Test conversion of geolocations."""
- import xarray as xr
import dask.array as da
- from satpy.writers.mitiff import MITIFFWriter
+ import xarray as xr
from pyresample.geometry import AreaDefinition
+
+ from satpy.writers.mitiff import MITIFFWriter
checks = [{'epsg': '+init=EPSG:32631',
'proj4': (' Proj string: +proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 '
'+ellps=WGS84 +datum=WGS84 +units=km +x_0=501020.000000 '
@@ -885,8 +914,10 @@ def test_convert_proj4_string(self):
def test_save_dataset_palette(self):
"""Test writer operation as palette."""
import os
+
import numpy as np
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
expected = np.full((100, 200), 0)
@@ -979,7 +1010,9 @@ def test_simple_write_two_bands(self):
def test_get_test_dataset_three_bands_prereq(self):
"""Test basic writer operation with 3 bands with DataQuery prerequisites with missing name."""
import os
+
from libtiff import TIFF
+
from satpy.writers.mitiff import MITIFFWriter
IMAGEDESCRIPTION = 270
diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py
new file mode 100644
index 0000000000..a339edaa8b
--- /dev/null
+++ b/satpy/tests/writer_tests/test_ninjogeotiff.py
@@ -0,0 +1,920 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for writing GeoTIFF files with NinJoTIFF tags."""
+
+import datetime
+import logging
+import os
+
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample import create_area_def
+
+from satpy import Scene
+from satpy.writers import get_enhanced_image
+
+try:
+ from math import prod
+except ImportError: # Remove when dropping Python < 3.8
+ from functools import reduce
+ from operator import mul
+
+ def prod(iterable): # type: ignore
+ """Drop-in replacement for math.prod."""
+ return reduce(mul, iterable, 1)
+
+
+def _get_fake_da(lo, hi, shp, dtype="f4"):
+ """Generate dask array with synthetic data.
+
+ This is more or less a 2d linspace: it'll return a 2-d dask array of shape
+ ``shp``, lowest value is ``lo``, highest value is ``hi``.
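+
+ For example, _get_fake_da(0., 1., (2, 5)) yields a 2x5 array stepping
+ from 0.0 to 0.9 in increments of 0.1 (``hi`` itself is excluded).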
+ """
+ return da.arange(lo, hi, (hi-lo)/prod(shp), chunks=50, dtype=dtype).reshape(shp)
+
+
+@pytest.fixture(scope="module")
+def test_area_tiny_eqc_sphere():
+ """Create 10x00 test equirectangular area centered on (40, -30), spherical geoid, m."""
+ shp = (10, 20)
+ test_area = create_area_def(
+ "test-area-eqc-sphere",
+ {"proj": "eqc", "lat_ts": 0., "lat_0": 0., "lon_0": 0.,
+ "x_0": 0., "y_0": 0., "ellps": "sphere", "units": "m",
+ "no_defs": None, "type": "crs"},
+ units="m",
+ shape=shp,
+ resolution=1000,
+ center=(-3330000.0, 4440000.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_small_eqc_wgs84():
+ """Create 50x100 test equirectangular area centered on (50, 90), wgs84."""
+ shp = (50, 100)
+ test_area = create_area_def(
+ "test-area-eqc-wgs84",
+ {"proj": "eqc", "lat_0": 2.5, "lon_0": 1., "ellps": "WGS84"},
+ units="m",
+ shape=shp,
+ resolution=1000,
+ center=(10000000.0, 6000000.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_tiny_stereographic_wgs84():
+ """Create a 20x10 test stereographic area centered near the north pole, wgs84."""
+ shp = (20, 10)
+ test_area = create_area_def(
+ "test-area-north-stereo",
+ {"proj": "stere", "lat_0": 75.0, "lon_0": 2.0, "lat_ts": 60.0,
+ "ellps": "WGS84", "units": "m", "type": "crs"},
+ units="m",
+ shape=shp,
+ resolution=1000,
+ center=(0.0, 1500000.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_tiny_antarctic():
+ """Create a 20x10 test stereographic area centered near the south pole, wgs84."""
+ shp = (20, 10)
+ test_area = create_area_def(
+ "test-area-south-stereo",
+ {"proj": "stere", "lat_0": -75.0, "lon_0": 2.0, "lat_ts": 60.0,
+ "ellps": "WGS84", "units": "m", "type": "crs"},
+ units="m",
+ shape=shp,
+ resolution=1000,
+ center=(0.0, -1500000.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_northpole():
+ """Create a 20x10 test area centered exactly on the north pole.
+
+ This has no well-defined central meridian so needs separate testing.
+ """
+ shp = (20, 10)
+ test_area = create_area_def(
+ "test-area-north-pole",
+ {"proj": "stere", "lat_0": 90, "lat_ts": 60, "ellps": "WGS84"},
+ shape=shp,
+ resolution=1000,
+ center=(0.0, 15000000.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_merc():
+ """Create a mercator area."""
+ from pyproj import CRS
+ shp = (20, 10)
+ test_area = create_area_def(
+ "test-area-merc",
+ CRS("+proj=merc"),
+ units="m",
+ shape=shp,
+ resolution=1000,
+ center=(0.0, 0.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_weird():
+ """Create a weird area (interrupted goode homolosine) to test error handling."""
+ from pyproj import CRS
+ shp = (20, 10)
+ test_area = create_area_def(
+ "test-area-north-stereo",
+ CRS("+proj=igh"),
+ units="m",
+ shape=shp,
+ resolution=1000,
+ center=(0.0, 1500000.0))
+ return test_area
+
+
+@pytest.fixture(scope="module")
+def test_area_epsg4326():
+ """Test with EPSG4326 (latlong) area, which has no CRS coordinate operation."""
+ from pyproj import CRS
+ shp = (16, 8)
+ euro4326 = create_area_def(
+ "epgs4326europa",
+ CRS.from_epsg(4326),
+ resolution=1/128,
+ shape=shp,
+ center=(0, 0))
+ return euro4326
+
+
+@pytest.fixture(scope="module")
+def test_image_small_mid_atlantic_L(test_area_tiny_eqc_sphere):
+ """Get a small test image in mode L, over Atlantic."""
+ arr = xr.DataArray(
+ _get_fake_da(-80, 40, test_area_tiny_eqc_sphere.shape + (1,)),
+ dims=("y", "x", "bands"),
+ attrs={
+ "name": "test-small-mid-atlantic",
+ "start_time": datetime.datetime(1985, 8, 13, 13, 0),
+ "area": test_area_tiny_eqc_sphere})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_small_mid_atlantic_K_L(test_area_tiny_eqc_sphere):
+ """Get a small test image in units K, mode L, over Atlantic."""
+ arr = xr.DataArray(
+ _get_fake_da(-80+273.15, 40+273.15, test_area_tiny_eqc_sphere.shape + (1,)),
+ dims=("y", "x", "bands"),
+ attrs={
+ "name": "test-small-mid-atlantic",
+ "start_time": datetime.datetime(1985, 8, 13, 13, 0),
+ "area": test_area_tiny_eqc_sphere,
+ "units": "K"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_large_asia_RGB(test_area_small_eqc_wgs84):
+ """Get a large-ish test image in mode RGB, over Asia."""
+ arr = xr.DataArray(
+ _get_fake_da(0, 255, test_area_small_eqc_wgs84.shape + (3,), "uint8"),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["R", "G", "B"]},
+ attrs={
+ "name": "test-large-asia",
+ "start_time": datetime.datetime(2015, 10, 21, 20, 25, 0),
+ "area": test_area_small_eqc_wgs84,
+ "mode": "RGB"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_small_arctic_P(test_area_tiny_stereographic_wgs84):
+ """Get a small-ish test image in mode P, over Arctic."""
+ arr = xr.DataArray(
+ _get_fake_da(0, 10, test_area_tiny_stereographic_wgs84.shape + (1,), "uint8"),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["P"]},
+ attrs={
+ "name": "test-small-arctic",
+ "start_time": datetime.datetime(2027, 8, 2, 8, 20),
+ "area": test_area_tiny_stereographic_wgs84,
+ "mode": "P"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_northpole(test_area_northpole):
+ """Test image with area exactly on northpole."""
+ arr = xr.DataArray(
+ _get_fake_da(1, 100, test_area_northpole.shape + (1,), "uint8"),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["L"]},
+ attrs={
+ "name": "test-northpole",
+ "start_time": datetime.datetime(1926, 5, 12, 0),
+ "area": test_area_northpole,
+ "mode": "L"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_weird(test_area_weird):
+ """Get a small image with some weird properties to test error handling."""
+ arr = xr.DataArray(
+ _get_fake_da(1, 2, test_area_weird.shape + (2,), "uint8"),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["L", "A"]},
+ attrs={
+ "name": "interrupted image",
+ "start_time": datetime.datetime(1970, 1, 1),
+ "area": test_area_weird,
+ "mode": "LA"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_rgba_merc(test_area_merc):
+ """Get a small test image in mode RGBA and mercator."""
+ arr = xr.DataArray(
+ _get_fake_da(-80, 40, test_area_merc.shape + (4,)),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["R", "G", "B", "A"]},
+ attrs={
+ "name": "test-rgba",
+ "start_time": datetime.datetime(2013, 2, 22, 12, 0),
+ "area": test_area_merc,
+ "mode": "RGBA"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_cmyk_antarctic(test_area_tiny_antarctic):
+ """Get a small test image in mode CMYK on south pole."""
+ arr = xr.DataArray(
+ _get_fake_da(-80, 40, test_area_tiny_antarctic.shape + (4,)),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["C", "M", "Y", "K"]},
+ attrs={
+ "name": "test-cmyk",
+ "start_time": datetime.datetime(2065, 11, 22, 11),
+ "area": test_area_tiny_antarctic,
+ "mode": "CMYK"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def test_image_latlon(test_area_epsg4326):
+ """Get image with latlon areadefinition."""
+ arr = xr.DataArray(
+ _get_fake_da(-50, 30, test_area_epsg4326.shape + (1,)),
+ dims=("y", "x", "bands"),
+ coords={"bands": ["L"]},
+ attrs={
+ "name": "test-latlon",
+ "start_time": datetime.datetime(2001, 1, 1, 0),
+ "area": test_area_epsg4326,
+ "mode": "L"})
+ return get_enhanced_image(arr)
+
+
+@pytest.fixture(scope="module")
+def ntg1(test_image_small_mid_atlantic_L):
+ """Create instance of NinJoTagGenerator class."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_small_mid_atlantic_L,
+ 255,
+ "quinoa.tif",
+ ChannelID=900015,
+ DataType="GORN",
+ PhysicUnit="C",
+ PhysicValue="Temperature",
+ SatelliteNameID=6400014,
+ DataSource="dowsing rod")
+
+
+@pytest.fixture(scope="module")
+def ntg2(test_image_large_asia_RGB):
+ """Create instance of NinJoTagGenerator class."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_large_asia_RGB,
+ 0,
+ "seitan.tif",
+ ChannelID=1000015,
+ DataType="GORN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6400014)
+
+
+@pytest.fixture(scope="module")
+def ntg3(test_image_small_arctic_P):
+ """Create instance of NinJoTagGenerator class."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_small_arctic_P,
+ 255,
+ "spelt.tif",
+ ChannelID=800012,
+ DataType="PPRN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6500014,
+ OverFlightTime=42)
+
+
+@pytest.fixture(scope="module")
+def ntg_northpole(test_image_northpole):
+ """Create NinJoTagGenerator with north pole image."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_northpole,
+ 255,
+ "lentils.tif",
+ ChannelID=900012,
+ DataType="PORN",
+ PhysicUnit="Temperature",
+ PhysicValue="K",
+ SatelliteNameID=7500014)
+
+
+@pytest.fixture(scope="module")
+def ntg_weird(test_image_weird):
+ """Create NinJoTagGenerator instance with weird image."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_weird,
+ 12,
+ "tempeh.tif",
+ ChannelID=800012,
+ DataType="PPRN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6500014)
+
+
+@pytest.fixture(scope="module")
+def ntg_no_fill_value(test_image_small_mid_atlantic_L):
+ """Create instance of NinJoTagGenerator class."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_small_mid_atlantic_L,
+ None,
+ "bulgur.tif",
+ ChannelID=900015,
+ DataType="GORN",
+ PhysicUnit="C",
+ PhysicValue="Temperature",
+ SatelliteNameID=6400014,
+ DataSource="dowsing rod")
+
+
+@pytest.fixture(scope="module")
+def ntg_rgba(test_image_rgba_merc):
+ """Create NinJoTagGenerator instance with RGBA image."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_rgba_merc,
+ 12,
+ "soy.tif",
+ ChannelID=800042,
+ DataType="GORN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6500014)
+
+
+@pytest.fixture(scope="module")
+def ntg_cmyk(test_image_cmyk_antarctic):
+ """Create NinJoTagGenerator instance with CMYK image."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_cmyk_antarctic,
+ 0,
+ "tvp.tif",
+ ChannelID=123042,
+ DataType="PPRN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6500014)
+
+
+@pytest.fixture(scope="module")
+def ntg_latlon(test_image_latlon):
+ """Create NinJoTagGenerator with latlon-area image."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ return NinJoTagGenerator(
+ test_image_latlon,
+ 0,
+ "latlon.tif",
+ ChannelID=123456,
+ DataType="GORN",
+ PhysicUnit="%",
+ PhysicValue="Reflectance",
+ SatelliteNameID=654321)
+
+
+@pytest.fixture
+def patch_datetime_now(monkeypatch):
+ """Get a fake datetime.datetime.now()."""
+ # Source: https://stackoverflow.com/a/20503374/974555, CC-BY-SA 4.0
+
+ class mydatetime(datetime.datetime):
+ """Drop-in replacement for datetime.datetime."""
+
+ @classmethod
+ def now(cls, tz=datetime.timezone.utc):
+ """Drop-in replacement for datetime.datetime.now."""
+ return datetime.datetime(2033, 5, 18, 3, 33, 20,
+ tzinfo=tz)
+
+ monkeypatch.setattr(datetime, 'datetime', mydatetime)
+
+
+def test_write_and_read_file(test_image_small_mid_atlantic_L, tmp_path):
+ """Test that it writes a GeoTIFF with the appropriate NinJo-tags."""
+ import rasterio
+
+ from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter
+ fn = os.fspath(tmp_path / "test.tif")
+ ngtw = NinJoGeoTIFFWriter()
+ ngtw.save_dataset(
+ test_image_small_mid_atlantic_L.data,
+ filename=fn,
+ fill_value=0,
+ blockxsize=128,
+ blockysize=128,
+ compress="lzw",
+ predictor=2,
+ PhysicUnit="C",
+ PhysicValue="Temperature",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="GORN",
+ DataSource="dowsing rod")
+ src = rasterio.open(fn)
+ tgs = src.tags()
+ assert tgs["ninjo_FileName"] == fn
+ assert tgs["ninjo_DataSource"] == "dowsing rod"
+ np.testing.assert_allclose(float(tgs["ninjo_Gradient"]),
+ 0.4653780307919959)
+ np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]),
+ -79.86837954904149)
+
+
+def test_write_and_read_file_RGB(test_image_large_asia_RGB, tmp_path):
+ """Test writing and reading RGB."""
+ import rasterio
+
+ from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter
+ fn = os.fspath(tmp_path / "test.tif")
+ ngtw = NinJoGeoTIFFWriter()
+ ngtw.save_dataset(
+ test_image_large_asia_RGB.data,
+ filename=fn,
+ fill_value=0,
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="GORN",
+ DataSource="dowsing rod")
+ src = rasterio.open(fn)
+ tgs = src.tags()
+ assert tgs["ninjo_FileName"] == fn
+ assert tgs["ninjo_DataSource"] == "dowsing rod"
+ assert "ninjo_Gradient" not in tgs.keys()
+ assert "ninjo_AxisIntercept" not in tgs.keys()
+ assert tgs["ninjo_PhysicValue"] == "N/A"
+
+
+def test_write_and_read_file_LA(test_image_latlon, tmp_path):
+ """Test writing and reading LA image."""
+ import rasterio
+
+ from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter
+ fn = os.fspath(tmp_path / "test.tif")
+ ngtw = NinJoGeoTIFFWriter()
+ ngtw.save_dataset(
+ test_image_latlon.data,
+ filename=fn,
+ fill_value=None, # to make it LA
+ PhysicUnit="%",
+ PhysicValue="Reflectance",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="GORN",
+ DataSource="dowsing rod")
+ src = rasterio.open(fn)
+ assert len(src.indexes) == 2 # mode LA
+ tgs = src.tags()
+ assert tgs["ninjo_FileName"] == fn
+ assert tgs["ninjo_DataSource"] == "dowsing rod"
+ np.testing.assert_allclose(float(tgs["ninjo_Gradient"]), 0.30816176470588236)
+ np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]), -49.603125)
+ assert tgs["ninjo_PhysicValue"] == "Reflectance"
+ assert tgs["ninjo_TransparentPixel"] == "-1" # meaning not set
+
+
+def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path):
+ """Test writing and reading P image."""
+ import rasterio
+ from trollimage.colormap import Colormap
+
+ from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter
+ fn = os.fspath(tmp_path / "test.tif")
+ ngtw = NinJoGeoTIFFWriter()
+ ngtw.save_image(
+ test_image_small_arctic_P,
+ filename=fn,
+ fill_value=255,
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="PPRN",
+ DataSource="dowsing rod",
+ keep_palette=True,
+ cmap=Colormap(*enumerate(zip(*([np.linspace(0, 1, 256)]*3)))))
+ src = rasterio.open(fn)
+ assert len(src.indexes) == 1 # mode P
+ assert src.colorinterp[0] == rasterio.enums.ColorInterp.palette
+ tgs = src.tags()
+ assert tgs["ninjo_FileName"] == fn
+ assert tgs["ninjo_DataSource"] == "dowsing rod"
+
+
+def test_write_and_read_file_units(
+ test_image_small_mid_atlantic_K_L, tmp_path, caplog):
+ """Test that it writes a GeoTIFF with the appropriate NinJo-tags and units."""
+ import rasterio
+
+ from satpy.writers.ninjogeotiff import NinJoGeoTIFFWriter
+ fn = os.fspath(tmp_path / "test.tif")
+ ngtw = NinJoGeoTIFFWriter()
+ with caplog.at_level(logging.DEBUG):
+ ngtw.save_dataset(
+ test_image_small_mid_atlantic_K_L.data,
+ filename=fn,
+ fill_value=0,
+ blockxsize=128,
+ blockysize=128,
+ compress="lzw",
+ predictor=2,
+ PhysicUnit="C",
+ PhysicValue="Temperature",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="GORN",
+ DataSource="dowsing rod")
+ assert "Adding offset for K → °C conversion" in caplog.text
+ # a better test would be to check that the attributes haven't changed at
+ # all, but that currently fails due to
+ # https://github.com/pytroll/satpy/issues/2022
+ assert test_image_small_mid_atlantic_K_L.data.attrs["enhancement_history"][0] != {"scale": 1, "offset": 273.15}
+ src = rasterio.open(fn)
+ tgs = src.tags()
+ assert tgs["ninjo_FileName"] == fn
+ assert tgs["ninjo_DataSource"] == "dowsing rod"
+ np.testing.assert_allclose(float(tgs["ninjo_Gradient"]),
+ 0.465379, rtol=1e-5)
+ np.testing.assert_allclose(float(tgs["ninjo_AxisIntercept"]),
+ -79.86838)
+ fn2 = os.fspath(tmp_path / "test2.tif")
+ with caplog.at_level(logging.WARNING):
+ ngtw.save_dataset(
+ test_image_small_mid_atlantic_K_L.data,
+ filename=fn2,
+ fill_value=0,
+ blockxsize=128,
+ blockysize=128,
+ compress="lzw",
+ predictor=2,
+ PhysicUnit="F",
+ PhysicValue="Temperature",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="GORN",
+ DataSource="dowsing rod")
+ assert ("Writing F to ninjogeotiff headers, but "
+ "data attributes have unit K. "
+ "No conversion applied.") in caplog.text
+
+
+def test_write_and_read_via_scene(test_image_small_mid_atlantic_L, tmp_path):
+ """Test that all attributes are written also when writing from scene.
+
+ It appears that :func:`Satpy.Scene.save_dataset` does not pass the filename
+ to the writer. Test that filename is still written to header when saving
+ this way (the regular way).
+ """
+ import rasterio
+ sc = Scene()
+ fn = os.fspath(tmp_path / "test-{name}.tif")
+ sc["montanha-do-pico"] = test_image_small_mid_atlantic_L.data
+ sc.save_dataset(
+ "montanha-do-pico",
+ writer="ninjogeotiff",
+ filename=fn,
+ fill_value=0,
+ PhysicUnit="C",
+ PhysicValue="Temperature",
+ SatelliteNameID=6400014,
+ ChannelID=900015,
+ DataType="GORN")
+ src = rasterio.open(tmp_path / "test-montanha-do-pico.tif")
+ tgs = src.tags()
+ assert tgs["ninjo_FileName"] == os.fspath(tmp_path / "test-montanha-do-pico.tif")
+
+
+def test_get_all_tags(ntg1, ntg3, ntg_latlon, ntg_northpole, caplog):
+ """Test getting all tags from dataset."""
+ # test that passed, dynamic, and mandatory tags are all included, and
+ # nothing more
+ t1 = ntg1.get_all_tags()
+ assert set(t1.keys()) == (
+ ntg1.fixed_tags.keys() |
+ ntg1.passed_tags |
+ ntg1.dynamic_tags.keys() |
+ {"DataSource"})
+ # test that when extra tag is passed this is also included
+ t3 = ntg3.get_all_tags()
+ assert t3.keys() == (
+ ntg3.fixed_tags.keys() |
+ ntg3.passed_tags |
+ ntg3.dynamic_tags.keys() |
+ {"OverFlightTime"})
+ assert t3["OverFlightTime"] == 42
+ # test that CentralMeridian skipped and warning logged
+ with caplog.at_level(logging.DEBUG):
+ t_latlon = ntg_latlon.get_all_tags()
+ assert ("Unable to obtain value for optional NinJo tag CentralMeridian"
+ in caplog.text)
+ assert "CentralMeridian" not in t_latlon.keys()
+ t_northpole = ntg_northpole.get_all_tags()
+ assert "CentralMeridian" not in t_northpole.keys()
+
+
+def test_calc_single_tag_by_name(ntg1, ntg2, ntg3):
+ """Test calculating single tag from dataset."""
+ assert ntg1.get_tag("Magic") == "NINJO"
+ assert ntg1.get_tag("DataType") == "GORN"
+ assert ntg2.get_tag("DataType") == "GORN"
+ assert ntg3.get_tag("DataType") == "PPRN"
+ assert ntg1.get_tag("DataSource") == "dowsing rod"
+ with pytest.raises(ValueError):
+ ntg1.get_tag("invalid")
+ with pytest.raises(ValueError):
+ ntg1.get_tag("OriginalHeader")
+ with pytest.raises(ValueError):
+ ntg1.get_tag("Gradient")
+
+
+def test_get_central_meridian(ntg1, ntg2, ntg3, ntg_latlon, ntg_northpole):
+ """Test calculating the central meridian."""
+ cm = ntg1.get_central_meridian()
+ assert isinstance(cm, float)
+ np.testing.assert_allclose(cm, 0.0)
+ np.testing.assert_allclose(ntg2.get_central_meridian(), 1.0)
+ np.testing.assert_allclose(ntg3.get_central_meridian(), 2.0)
+ with pytest.raises(AttributeError):
+ # latlon area has no central meridian
+ ntg_latlon.get_central_meridian()
+ with pytest.raises(KeyError):
+ # nor does area exactly on northpole
+ ntg_northpole.get_central_meridian()
+
+
+def test_get_color_depth(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk):
+ """Test extracting the color depth."""
+ cd = ntg1.get_color_depth()
+ assert isinstance(cd, int)
+ assert cd == 8 # mode L
+ assert ntg2.get_color_depth() == 24 # mode RGB
+ assert ntg3.get_color_depth() == 8 # mode P
+ assert ntg_weird.get_color_depth() == 16 # mode LA
+ assert ntg_rgba.get_color_depth() == 32 # mode RGBA
+ with pytest.raises(ValueError):
+ ntg_cmyk.get_color_depth()
+
+
+def test_get_creation_date_id(ntg1, ntg2, ntg3, patch_datetime_now):
+ """Test getting the creation date ID.
+
+ This is the time at which the file was created.
+
+ This test believes it is run at 2033-05-18 03:33:20Z.
+ """
+ cdid = ntg1.get_creation_date_id()
+ assert isinstance(cdid, int)
+ assert cdid == 2000000000
+ assert ntg2.get_creation_date_id() == 2000000000
+ assert ntg3.get_creation_date_id() == 2000000000
+
+
+def test_get_date_id(ntg1, ntg2, ntg3):
+ """Test getting the date ID."""
+ did = ntg1.get_date_id()
+ assert isinstance(did, int)
+ assert did == 492786000
+ assert ntg2.get_date_id() == 1445459100
+ assert ntg3.get_date_id() == 1817194800
+
+
+def test_get_earth_radius_large(ntg1, ntg2, ntg3):
+ """Test getting the Earth semi-major axis."""
+ erl = ntg1.get_earth_radius_large()
+ assert isinstance(erl, float)
+ np.testing.assert_allclose(erl, 6370997.0)
+ np.testing.assert_allclose(ntg2.get_earth_radius_large(), 6378137.0)
+ np.testing.assert_allclose(ntg3.get_earth_radius_large(), 6378137.0)
+
+
+def test_get_earth_radius_small(ntg1, ntg2, ntg3):
+ """Test getting the Earth semi-minor axis."""
+ ers = ntg1.get_earth_radius_small()
+ assert isinstance(ers, float)
+ np.testing.assert_allclose(ers, 6370997.0)
+ np.testing.assert_allclose(ntg2.get_earth_radius_small(), 6356752.314245179)
+ np.testing.assert_allclose(ntg3.get_earth_radius_small(), 6356752.314245179)
+
+
+def test_get_filename(ntg1, ntg2, ntg3):
+ """Test getting the filename."""
+ assert ntg1.get_filename() == "quinoa.tif"
+ assert ntg2.get_filename() == "seitan.tif"
+ assert ntg3.get_filename() == "spelt.tif"
+
+
+def test_get_min_gray_value_L(ntg1):
+ """Test getting min gray value for mode L."""
+ mg = ntg1.get_min_gray_value()
+ assert isinstance(mg.compute().item(), int)
+ assert mg.compute() == 0
+
+
+def test_get_min_gray_value_RGB(ntg2):
+ """Test getting min gray value for RGB.
+
+ Note that the min/max gray values appear to be mandatory in NinJo, even for RGBs.
+ """
+ assert ntg2.get_min_gray_value().compute().item() == 1 # fill value 0
+
+
+def test_get_min_gray_value_P(ntg3):
+ """Test getting min gray value for mode P."""
+ assert ntg3.get_min_gray_value().compute().item() == 0
+
+
+def test_get_max_gray_value_L(ntg1):
+ """Test getting max gray value for mode L."""
+ mg = ntg1.get_max_gray_value().compute().item()
+ assert isinstance(mg, int)
+ assert mg == 254 # fill value is 255
+
+
+def test_get_max_gray_value_RGB(ntg2):
+ """Test max gray value for RGB."""
+ assert ntg2.get_max_gray_value() == 255
+
+
+@pytest.mark.xfail(reason="Needs GeoTIFF P fixes, see GH#1844")
+def test_get_max_gray_value_P(ntg3):
+ """Test getting max gray value for mode P."""
+ assert ntg3.get_max_gray_value().compute().item() == 10
+
+
+@pytest.mark.xfail(reason="not easy, not needed, not implemented")
+def test_get_meridian_east(ntg1, ntg2, ntg3):
+ """Test getting east meridian."""
+ np.testing.assert_allclose(ntg1.get_meridian_east(), -29.048101549452294)
+ np.testing.assert_allclose(ntg2.get_meridian_east(), 180.0)
+ np.testing.assert_allclose(ntg3.get_meridian_east(), 99.81468125314737)
+
+
+@pytest.mark.xfail(reason="not easy, not needed, not implemented")
+def test_get_meridian_west(ntg1, ntg2, ntg3):
+ """Test getting west meridian."""
+ np.testing.assert_allclose(ntg1.get_meridian_west(), -30.846745608241903)
+ np.testing.assert_allclose(ntg2.get_meridian_west(), -180.0)
+ np.testing.assert_allclose(ntg3.get_meridian_west(), 81.84837557075694)
+
+
+def test_get_projection(ntg1, ntg2, ntg3, ntg_weird, ntg_rgba, ntg_cmyk,
+ ntg_latlon):
+ """Test getting projection string."""
+ assert ntg1.get_projection() == "PLAT"
+ assert ntg2.get_projection() == "PLAT"
+ assert ntg3.get_projection() == "NPOL"
+ assert ntg_cmyk.get_projection() == "SPOL"
+ assert ntg_rgba.get_projection() == "MERC"
+ assert ntg_latlon.get_projection() == "PLAT"
+ with pytest.raises(ValueError):
+ ntg_weird.get_projection()
+
+
+def test_get_ref_lat_1(ntg1, ntg2, ntg3, ntg_weird, ntg_latlon):
+ """Test getting reference latitude 1."""
+ rl1 = ntg1.get_ref_lat_1()
+ assert isinstance(rl1, float)
+ np.testing.assert_allclose(rl1, 0.0)
+ np.testing.assert_allclose(ntg2.get_ref_lat_1(), 2.5)
+ np.testing.assert_allclose(ntg3.get_ref_lat_1(), 75)
+ with pytest.raises(ValueError):
+ ntg_weird.get_ref_lat_1()
+ with pytest.raises(AttributeError):
+ ntg_latlon.get_ref_lat_1()
+
+
+@pytest.mark.xfail(reason="Not implemented, what is this?")
+def test_get_ref_lat_2(ntg1, ntg2, ntg3):
+ """Test getting reference latitude 2."""
+ rl2 = ntg1.get_ref_lat_2()
+ assert isinstance(rl2, float)
+ np.testing.assert_allclose(rl2, 0.0)
+ np.testing.assert_allclose(ntg2.get_ref_lat_2(), 0.0)
+ np.testing.assert_allclose(ntg3.get_ref_lat_2(), 0.0)
+
+
+def test_get_transparent_pixel(ntg1, ntg2, ntg3, ntg_no_fill_value):
+ """Test getting fill value."""
+ tp = ntg1.get_transparent_pixel()
+ assert isinstance(tp, int)
+ assert tp == 255
+ assert ntg2.get_transparent_pixel() == 0
+ assert ntg3.get_transparent_pixel() == 255
+ assert ntg_no_fill_value.get_transparent_pixel() == -1
+
+
+def test_get_xmax(ntg1, ntg2, ntg3):
+ """Test getting maximum x."""
+ xmax = ntg1.get_xmaximum()
+ assert isinstance(xmax, int)
+ assert xmax == 20
+ assert ntg2.get_xmaximum() == 100
+ assert ntg3.get_xmaximum() == 10
+
+
+def test_get_ymax(ntg1, ntg2, ntg3):
+ """Test getting maximum y."""
+ ymax = ntg1.get_ymaximum()
+ assert isinstance(ymax, int)
+ assert ymax == 10
+ assert ntg2.get_ymaximum() == 50
+ assert ntg3.get_ymaximum() == 20
+
+
+def test_create_unknown_tags(test_image_small_arctic_P):
+ """Test that unknown tags raise ValueError."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ with pytest.raises(ValueError):
+ NinJoTagGenerator(
+ test_image_small_arctic_P,
+ 42,
+ "quorn.tif",
+ ChannelID=800012,
+ DataType="GPRN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID=6500014,
+ Locatie="Hozomeen")
+
+
+def test_str_ids(test_image_small_arctic_P):
+ """Test that channel and satellit IDs can be str."""
+ from satpy.writers.ninjogeotiff import NinJoTagGenerator
+ NinJoTagGenerator(
+ test_image_small_arctic_P,
+ 42,
+ "quorn.tif",
+ ChannelID="la manche",
+ DataType="GPRN",
+ PhysicUnit="N/A",
+ PhysicValue="N/A",
+ SatelliteNameID="trollsat")
diff --git a/satpy/tests/writer_tests/test_ninjotiff.py b/satpy/tests/writer_tests/test_ninjotiff.py
index 76fc42638c..58f991e73d 100644
--- a/satpy/tests/writer_tests/test_ninjotiff.py
+++ b/satpy/tests/writer_tests/test_ninjotiff.py
@@ -21,8 +21,8 @@
import unittest
from unittest import mock
-import pytest
import numpy as np
+import pytest
import xarray as xr
@@ -67,6 +67,19 @@ def test_dataset(self, iwsd):
uconv.assert_called_once_with(dataset, 'K', 'CELSIUS')
self.assertEqual(iwsd.call_count, 1)
+ @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_dataset')
+ @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff)
+ def test_dataset_skip_unit_conversion(self, iwsd):
+ """Test saving a dataset without unit conversion."""
+ from satpy.writers.ninjotiff import NinjoTIFFWriter
+ ntw = NinjoTIFFWriter()
+ dataset = xr.DataArray([1, 2, 3], attrs={'units': 'K'})
+ with mock.patch('satpy.writers.ninjotiff.convert_units') as uconv:
+ ntw.save_dataset(dataset, physic_unit='CELSIUS',
+ convert_temperature_units=False)
+ uconv.assert_not_called()
+ self.assertEqual(iwsd.call_count, 1)
+
@mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset')
@mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image')
@mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff)
@@ -80,15 +93,16 @@ def test_image(self, iwsi, save_dataset):
img = FakeImage(dataset, 'L')
ret = ntw.save_image(img, filename='bla.tif', compute=False)
nt.save.assert_called()
- assert(nt.save.mock_calls[0][2]['compute'] is False)
- assert(nt.save.mock_calls[0][2]['ch_min_measurement_unit']
- < nt.save.mock_calls[0][2]['ch_max_measurement_unit'])
- assert(ret == nt.save.return_value)
+ assert nt.save.mock_calls[0][2]['compute'] is False
+ assert nt.save.mock_calls[0][2]['ch_min_measurement_unit'] < nt.save.mock_calls[0][2]['ch_max_measurement_unit']
+ assert ret == nt.save.return_value
def test_convert_units_self(self):
"""Test that unit conversion to themselves do nothing."""
- from ..utils import make_fake_scene
from satpy.writers.ninjotiff import convert_units
+
+ from ..utils import make_fake_scene
+
# ensure that converting from % to itself does not change the data
sc = make_fake_scene(
{"VIS006": np.arange(25, dtype="f4").reshape(5, 5)},
@@ -101,26 +115,28 @@ def test_convert_units_self(self):
def test_convert_units_temp(self):
"""Test that temperature unit conversions works as expected."""
# test converting between °C and K
- from ..utils import make_fake_scene
from satpy.writers.ninjotiff import convert_units
+
+ from ..utils import make_fake_scene
sc = make_fake_scene(
{"IR108": np.arange(25, dtype="f4").reshape(5, 5)},
common_attrs={"units": "K"})
- ds_in = sc["IR108"]
+ ds_in_k = sc["IR108"]
for out_unit in ("C", "CELSIUS"):
- ds_out = convert_units(ds_in, "K", out_unit)
- np.testing.assert_array_almost_equal(ds_in + 273.15, ds_out)
- assert ds_in.attrs != ds_out.attrs
- assert ds_out.attrs["units"] == out_unit
+ ds_out_c = convert_units(ds_in_k, "K", out_unit)
+ np.testing.assert_array_almost_equal(ds_in_k - 273.15, ds_out_c)
+ assert ds_in_k.attrs != ds_out_c.attrs
+ assert ds_out_c.attrs["units"] == out_unit
# test that keys aren't lost
- assert ds_out.attrs.keys() - ds_in.attrs.keys() <= {"units"}
- assert ds_in.attrs.keys() <= ds_out.attrs.keys()
+ assert ds_out_c.attrs.keys() - ds_in_k.attrs.keys() <= {"units"}
+ assert ds_in_k.attrs.keys() <= ds_out_c.attrs.keys()
def test_convert_units_other(self):
"""Test that other unit conversions are not implemented."""
# test arbitrary different conversion
- from ..utils import make_fake_scene
from satpy.writers.ninjotiff import convert_units
+
+ from ..utils import make_fake_scene
sc = make_fake_scene(
{"rain_rate": np.arange(25, dtype="f8").reshape(5, 5)},
common_attrs={"units": "millimeter/hour"})
@@ -128,3 +144,17 @@ def test_convert_units_other(self):
ds_in = sc["rain_rate"]
with pytest.raises(NotImplementedError):
convert_units(ds_in, "millimeter/hour", "m/s")
+
+ @mock.patch('satpy.writers.ninjotiff.NinjoTIFFWriter.save_dataset')
+ @mock.patch('satpy.writers.ninjotiff.ImageWriter.save_image')
+ @mock.patch('satpy.writers.ninjotiff.nt', pyninjotiff_mock.ninjotiff)
+ def test_P_image_is_uint8(self, iwsi, save_dataset):
+ """Test that a P-mode image is converted to uint8s."""
+ nt = pyninjotiff_mock.ninjotiff
+ nt.reset_mock()
+ from satpy.writers.ninjotiff import NinjoTIFFWriter
+ ntw = NinjoTIFFWriter()
+ dataset = xr.DataArray([1, 2, 3]).astype(int)
+ img = FakeImage(dataset, 'P')
+ ntw.save_image(img, filename='bla.tif', compute=False)
+ assert nt.save.mock_calls[0][1][0].data.dtype == np.uint8
diff --git a/satpy/tests/writer_tests/test_simple_image.py b/satpy/tests/writer_tests/test_simple_image.py
index f92615f59d..5ebf0dfb47 100644
--- a/satpy/tests/writer_tests/test_simple_image.py
+++ b/satpy/tests/writer_tests/test_simple_image.py
@@ -38,9 +38,10 @@ def tearDown(self):
@staticmethod
def _get_test_datasets():
"""Create DataArray for testing."""
- import xarray as xr
- import dask.array as da
from datetime import datetime
+
+ import dask.array as da
+ import xarray as xr
ds1 = xr.DataArray(
da.zeros((100, 200), chunks=50),
dims=('y', 'x'),
@@ -64,8 +65,9 @@ def test_simple_write(self):
def test_simple_delayed_write(self):
"""Test writing datasets with delayed computation."""
from dask.delayed import Delayed
- from satpy.writers.simple_image import PillowWriter
+
from satpy.writers import compute_writer_results
+ from satpy.writers.simple_image import PillowWriter
datasets = self._get_test_datasets()
w = PillowWriter(base_dir=self.base_dir)
res = w.save_datasets(datasets, compute=False)
diff --git a/satpy/tests/writer_tests/test_utils.py b/satpy/tests/writer_tests/test_utils.py
index 34728e3bec..10a199d8b0 100644
--- a/satpy/tests/writer_tests/test_utils.py
+++ b/satpy/tests/writer_tests/test_utils.py
@@ -18,6 +18,7 @@
"""Tests for writer utilities."""
import unittest
+
import satpy.writers.utils as wutils
diff --git a/satpy/utils.py b/satpy/utils.py
index 8ff9baa3f3..26efc90bf5 100644
--- a/satpy/utils.py
+++ b/satpy/utils.py
@@ -18,24 +18,32 @@
# along with satpy. If not, see <http://www.gnu.org/licenses/>.
"""Module defining various utilities."""
+from __future__ import annotations
+
+import contextlib
+import datetime
import logging
import os
import warnings
-import contextlib
-from typing import Mapping
+from typing import Mapping, Optional
+from urllib.parse import urlparse
import numpy as np
+import xarray as xr
import yaml
-from yaml import BaseLoader
+from yaml import BaseLoader, UnsafeLoader
-try:
- from yaml import UnsafeLoader
-except ImportError:
- from yaml import Loader as UnsafeLoader
+from satpy import CHUNK_SIZE
_is_logging_on = False
TRACE_LEVEL = 5
+logger = logging.getLogger(__name__)
+
+
+class PerformanceWarning(Warning):
+ """Warning raised when there is a possible performance impact."""
+
def ensure_dir(filename):
"""Check if the dir of f exists, otherwise create it."""
@@ -173,7 +181,18 @@ def in_ipynb():
def lonlat2xyz(lon, lat):
- """Convert lon lat to cartesian."""
+ """Convert lon lat to cartesian.
+
+ For a sphere with unit radius, convert the spherical coordinates
+ longitude and latitude to cartesian coordinates.
+
+ Args:
+ lon (number or array of numbers): Longitude in °.
+ lat (number or array of numbers): Latitude in °.
+
+ Returns:
+ (x, y, z) Cartesian coordinates [1]
+ """
lat = np.deg2rad(lat)
lon = np.deg2rad(lon)
x = np.cos(lat) * np.cos(lon)
@@ -183,7 +202,21 @@ def lonlat2xyz(lon, lat):
def xyz2lonlat(x, y, z, asin=False):
- """Convert cartesian to lon lat."""
+ """Convert cartesian to lon lat.
+
+ For a sphere with unit radius, convert cartesian coordinates to spherical
+ coordinates longitude and latitude.
+
+ Args:
+ x (number or array of numbers): x-coordinate, unitless
+ y (number or array of numbers): y-coordinate, unitless
+ z (number or array of numbers): z-coordinate, unitless
+ asin (optional, bool): If true, use arcsin for calculations.
+ If false, use arctan2 for calculations.
+
+ Returns:
+ (lon, lat): Longitude and latitude in °.
+ """
lon = np.rad2deg(np.arctan2(y, x))
if asin:
lat = np.rad2deg(np.arcsin(z))
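A round-trip sketch of the two conversions above (values are illustrative)::

    from satpy.utils import lonlat2xyz, xyz2lonlat

    x, y, z = lonlat2xyz(10.0, 45.0)   # unit-sphere cartesian coordinates
    lon, lat = xyz2lonlat(x, y, z)     # recovers approximately (10.0, 45.0)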
@@ -237,42 +270,6 @@ def _get_sunz_corr_li_and_shibata(cos_zen):
return 24.35 / (2. * cos_zen + np.sqrt(498.5225 * cos_zen**2 + 1))
-def sunzen_corr_cos(data, cos_zen, limit=88., max_sza=95.):
- """Perform Sun zenith angle correction.
-
- The correction is based on the provided cosine of the zenith
- angle (``cos_zen``). The correction is limited
- to ``limit`` degrees (default: 88.0 degrees). For larger zenith
- angles, the correction is the same as at the ``limit`` if ``max_sza``
- is `None`. The default behavior is to gradually reduce the correction
- past ``limit`` degrees up to ``max_sza`` where the correction becomes
- 0. Both ``data`` and ``cos_zen`` should be 2D arrays of the same shape.
-
- """
- # Convert the zenith angle limit to cosine of zenith angle
- limit_rad = np.deg2rad(limit)
- limit_cos = np.cos(limit_rad)
- max_sza_rad = np.deg2rad(max_sza) if max_sza is not None else max_sza
-
- # Cosine correction
- corr = 1. / cos_zen
- if max_sza is not None:
- # gradually fall off for larger zenith angle
- grad_factor = (np.arccos(cos_zen) - limit_rad) / (max_sza_rad - limit_rad)
- # invert the factor so maximum correction is done at `limit` and falls off later
- grad_factor = 1. - np.log(grad_factor + 1) / np.log(2)
- # make sure we don't make anything negative
- grad_factor = grad_factor.clip(0.)
- else:
- # Use constant value (the limit) for larger zenith angles
- grad_factor = 1.
- corr = corr.where(cos_zen > limit_cos, grad_factor / limit_cos)
- # Force "night" pixels to 0 (where SZA is invalid)
- corr = corr.where(cos_zen.notnull(), 0)
-
- return data * corr
-
-
def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.):
"""Perform Sun zenith angle correction.
@@ -314,56 +311,123 @@ def atmospheric_path_length_correction(data, cos_zen, limit=88., max_sza=95.):
return data * corr
-def get_satpos(dataset):
+def get_satpos(
+ data_arr: xr.DataArray,
+ preference: Optional[str] = None,
+ use_tle: bool = False
+) -> tuple[float, float, float]:
"""Get satellite position from dataset attributes.
- Preferences are:
-
- * Longitude & Latitude: Nadir, actual, nominal, projection
- * Altitude: Actual, nominal, projection
-
- A warning is issued when projection values have to be used because nothing else is available.
+ Args:
+ data_arr: DataArray object to access ``.attrs`` metadata
+ from.
+ preference: Optional preference for one of the available types of
+ position information. If not provided or ``None`` then the default
+ preference is:
+
+ * Longitude & Latitude: nadir, actual, nominal, projection
+ * Altitude: actual, nominal, projection
+
+ The provided ``preference`` can be any one of these individual
+ strings (nadir, actual, nominal, projection). If the
+ preference is not available then the original preference list is
+ used. A warning is issued when projection values have to be used because
+ nothing else is available and it wasn't provided as the ``preference``.
+ use_tle: If true, try to obtain position via satellite name
+ and TLE if it can't be determined otherwise. This requires pyorbital, skyfield,
+ and astropy to be installed and may need network access to obtain the TLE.
+ Note that even if ``use_tle`` is true, the TLE will not be used if
+ the dataset metadata contain the satellite position directly.
Returns:
- Geodetic longitude, latitude, altitude
+ Geodetic longitude, latitude, altitude [km]
"""
+ if preference is not None and preference not in ("nadir", "actual", "nominal", "projection"):
+ raise ValueError(f"Unrecognized satellite coordinate preference: {preference}")
+ lonlat_prefixes = ("nadir_", "satellite_actual_", "satellite_nominal_", "projection_")
+ alt_prefixes = _get_prefix_order_by_preference(lonlat_prefixes[1:], preference)
+ lonlat_prefixes = _get_prefix_order_by_preference(lonlat_prefixes, preference)
try:
- orb_params = dataset.attrs['orbital_parameters']
+ lon, lat = _get_sat_lonlat(data_arr, lonlat_prefixes)
+ alt = _get_sat_altitude(data_arr, alt_prefixes)
+ except KeyError:
+ if use_tle:
+ logger.warning(
+ "Orbital parameters missing from metadata. "
+ "Calculating from TLE using skyfield and astropy.")
+ return _get_satpos_from_platform_name(data_arr)
+ raise KeyError("Unable to determine satellite position. Either the "
+ "reader doesn't provide that information or "
+ "geolocation datasets were not available.")
+ return lon, lat, alt
- # Altitude
- try:
- alt = orb_params['satellite_actual_altitude']
- except KeyError:
- try:
- alt = orb_params['satellite_nominal_altitude']
- except KeyError:
- alt = orb_params['projection_altitude']
- warnings.warn('Actual satellite altitude not available, using projection altitude instead.')
- # Longitude & Latitude
- try:
- lon = orb_params['nadir_longitude']
- lat = orb_params['nadir_latitude']
- except KeyError:
- try:
- lon = orb_params['satellite_actual_longitude']
- lat = orb_params['satellite_actual_latitude']
- except KeyError:
- try:
- lon = orb_params['satellite_nominal_longitude']
- lat = orb_params['satellite_nominal_latitude']
- except KeyError:
- lon = orb_params['projection_longitude']
- lat = orb_params['projection_latitude']
- warnings.warn('Actual satellite lon/lat not available, using projection centre instead.')
+def _get_prefix_order_by_preference(prefixes, preference):
+ preferred_prefixes = [prefix for prefix in prefixes if preference and preference in prefix]
+ nonpreferred_prefixes = [prefix for prefix in prefixes if not preference or preference not in prefix]
+ if nonpreferred_prefixes[-1] == "projection_":
+ # remove projection as a prefix as it is our fallback
+ nonpreferred_prefixes = nonpreferred_prefixes[:-1]
+ return preferred_prefixes + nonpreferred_prefixes
+
+
+def _get_sat_altitude(data_arr, key_prefixes):
+ orb_params = data_arr.attrs["orbital_parameters"]
+ alt_keys = [prefix + "altitude" for prefix in key_prefixes]
+ try:
+ alt = _get_first_available_item(orb_params, alt_keys)
except KeyError:
- # Legacy
- lon = dataset.attrs['satellite_longitude']
- lat = dataset.attrs['satellite_latitude']
- alt = dataset.attrs['satellite_altitude']
+ alt = orb_params['projection_altitude']
+ warnings.warn('Actual satellite altitude not available, using projection altitude instead.')
+ return alt
- return lon, lat, alt
+
+def _get_sat_lonlat(data_arr, key_prefixes):
+ orb_params = data_arr.attrs["orbital_parameters"]
+ lon_keys = [prefix + "longitude" for prefix in key_prefixes]
+ lat_keys = [prefix + "latitude" for prefix in key_prefixes]
+ try:
+ lon = _get_first_available_item(orb_params, lon_keys)
+ lat = _get_first_available_item(orb_params, lat_keys)
+ except KeyError:
+ lon = orb_params['projection_longitude']
+ lat = orb_params['projection_latitude']
+ warnings.warn('Actual satellite lon/lat not available, using projection center instead.')
+ return lon, lat
+
+
+def _get_satpos_from_platform_name(cth_dataset):
+ """Get satellite position if no orbital parameters in metadata.
+
+ Some cloud top height datasets lack orbital parameter information in
+ metadata. Here, orbital parameters are calculated based on the platform
+ name and start time, via Two Line Element (TLE) information.
+
+ Needs pyorbital, skyfield, and astropy to be installed.
+ """
+ from pyorbital.orbital import tlefile
+ from skyfield.api import EarthSatellite, load
+ from skyfield.toposlib import wgs84
+
+ name = cth_dataset.attrs["platform_name"]
+ tle = tlefile.read(name)
+ es = EarthSatellite(tle.line1, tle.line2, name)
+ ts = load.timescale()
+ gc = es.at(ts.from_datetime(
+ cth_dataset.attrs["start_time"].replace(tzinfo=datetime.timezone.utc)))
+ (lat, lon) = wgs84.latlon_of(gc)
+ height = wgs84.height_of(gc).to("km")
+ return (lon.degrees, lat.degrees, height.value)
+
+
+def _get_first_available_item(data_dict, possible_keys):
+ for possible_key in possible_keys:
+ try:
+ return data_dict[possible_key]
+ except KeyError:
+ continue
+ raise KeyError("None of the possible keys found: {}".format(", ".join(possible_keys)))
def recursive_dict_update(d, u):
@@ -391,10 +455,11 @@ def _check_yaml_configs(configs, key):
diagnostic = {}
for i in configs:
for fname in i:
+ msg = 'ok'
+ res = None
with open(fname, 'r', encoding='utf-8') as stream:
try:
res = yaml.load(stream, Loader=UnsafeLoader)
- msg = 'ok'
except yaml.YAMLError as err:
stream.seek(0)
res = yaml.load(stream, Loader=BaseLoader)
@@ -457,3 +522,161 @@ def check_satpy(readers=None, writers=None, extras=None):
for module_name, res in sorted(_check_import(module_names).items()):
print(module_name + ': ', res)
print()
+
+
+def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]:
+ """Run :func:`xarray.unify_chunks` if input dimensions are all the same size.
+
+ This is mostly used in :class:`satpy.composites.CompositeBase` to
+ safeguard against running :func:`dask.array.core.map_blocks` with arrays of
+ different chunk sizes. Doing so can cause unexpected results or errors.
+ However, xarray's ``unify_chunks`` will raise an exception if dimensions
+ of the provided DataArrays are different sizes. This is a common case for
+ Satpy. For example, the "bands" dimension may be 1 (L), 2 (LA), 3 (RGB), or
+ 4 (RGBA) for most compositor operations that combine other composites
+ together.
+
+ """
+ if not hasattr(xr, "unify_chunks"):
+ return data_arrays
+ if not _all_dims_same_size(data_arrays):
+ return data_arrays
+ return tuple(xr.unify_chunks(*data_arrays))
+
+
+def _all_dims_same_size(data_arrays: tuple[xr.DataArray, ...]) -> bool:
+ known_sizes: dict[str, int] = {}
+ for data_arr in data_arrays:
+ for dim, dim_size in data_arr.sizes.items():
+ known_size = known_sizes.setdefault(dim, dim_size)
+ if dim_size != known_size:
+ # this dimension is a different size than previously found
+ # xarray.unify_chunks will error out if we tried to use it
+ return False
+ return True
+
+
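A behaviour sketch of the guard above (arrays are made up)::

    import dask.array as da
    import xarray as xr
    from satpy.utils import unify_chunks

    a = xr.DataArray(da.zeros((3, 100, 100), chunks=50), dims=("bands", "y", "x"))
    b = xr.DataArray(da.zeros((100, 100), chunks=25), dims=("y", "x"))
    a2, b2 = unify_chunks(a, b)   # shared dims match in size -> chunks unified

    c = xr.DataArray(da.zeros((100, 50), chunks=25), dims=("y", "x"))
    a3, c3 = unify_chunks(a, c)   # "x" sizes differ -> inputs returned unchanged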
+@contextlib.contextmanager
+def ignore_invalid_float_warnings():
+ """Ignore warnings generated for working with NaN/inf values.
+
+ Numpy and dask sometimes don't like NaN or inf values in normal function
+ calls. This context manager hides/ignores them inside its context.
+
+ Examples:
+ Use around numpy operations that you expect to produce warnings::
+
+ with ignore_invalid_float_warnings():
+ np.nanmean(np.nan)
+
+ """
+ with np.errstate(invalid="ignore"), warnings.catch_warnings():
+ warnings.simplefilter("ignore", RuntimeWarning)
+ yield
+
+
+def get_chunk_size_limit(dtype):
+ """Compute the chunk size limit in bytes given *dtype*.
+
+ Returns:
+ If PYTROLL_CHUNK_SIZE is not defined, this function returns None,
+ otherwise it returns the computed chunk size in bytes.
+ """
+ pixel_size = get_chunk_pixel_size()
+ if pixel_size is not None:
+ return pixel_size * np.dtype(dtype).itemsize
+ return None
+
+
+def get_chunk_pixel_size():
+ """Compute the maximum chunk size from CHUNK_SIZE."""
+ if CHUNK_SIZE is None:
+ return None
+
+ if isinstance(CHUNK_SIZE, (tuple, list)):
+ array_size = np.product(CHUNK_SIZE)
+ else:
+ array_size = CHUNK_SIZE ** 2
+ return array_size
+
+
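A worked example of the arithmetic above, assuming ``CHUNK_SIZE`` is the scalar
4096: the pixel count is ``4096 ** 2 == 16_777_216`` and, for ``float64`` (8
bytes per item), the byte limit is ``16_777_216 * 8 == 134_217_728`` (128 MiB)::

    from satpy.utils import get_chunk_size_limit

    limit = get_chunk_size_limit("float64")  # None unless PYTROLL_CHUNK_SIZE is set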
+def convert_remote_files_to_fsspec(filenames, storage_options=None):
+ """Check filenames for transfer protocols, convert to FSFile objects if possible."""
+ if storage_options is None:
+ storage_options = {}
+ if isinstance(filenames, dict):
+ return _check_file_protocols_for_dicts(filenames, storage_options)
+ return _check_file_protocols(filenames, storage_options)
+
+
+def _check_file_protocols_for_dicts(filenames, storage_options):
+ res = {}
+ for reader, files in filenames.items():
+ opts = storage_options.get(reader, {})
+ res[reader] = _check_file_protocols(files, opts)
+ return res
+
+
+def _check_file_protocols(filenames, storage_options):
+ local_files, remote_files, fs_files = _sort_files_to_local_remote_and_fsfiles(filenames)
+
+ if remote_files:
+ return local_files + fs_files + _filenames_to_fsfile(remote_files, storage_options)
+
+ return local_files + fs_files
+
+
+def _sort_files_to_local_remote_and_fsfiles(filenames):
+ from satpy.readers import FSFile
+
+ local_files = []
+ remote_files = []
+ fs_files = []
+ for f in filenames:
+ if isinstance(f, FSFile):
+ fs_files.append(f)
+ elif urlparse(f).scheme in ('', 'file') or "\\" in f:
+ local_files.append(f)
+ else:
+ remote_files.append(f)
+ return local_files, remote_files, fs_files
+
+
+def _filenames_to_fsfile(filenames, storage_options):
+ import fsspec
+
+ from satpy.readers import FSFile
+
+ if filenames:
+ fsspec_files = fsspec.open_files(filenames, **storage_options)
+ return [FSFile(f) for f in fsspec_files]
+ return []
+
+
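A usage sketch (paths and bucket name are made up)::

    from satpy.utils import convert_remote_files_to_fsspec

    files = [
        "/data/local_file.nc",         # no scheme -> kept as a plain local path
        "s3://bucket/remote_file.nc",  # remote scheme -> wrapped into an FSFile
    ]
    converted = convert_remote_files_to_fsspec(files, storage_options={"anon": True})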
+def get_storage_options_from_reader_kwargs(reader_kwargs):
+ """Read and clean storage options from reader_kwargs."""
+ if reader_kwargs is None:
+ return None, None
+ storage_options = reader_kwargs.pop('storage_options', None)
+ storage_opt_dict = _get_storage_dictionary_options(reader_kwargs)
+ storage_options = _merge_storage_options(storage_options, storage_opt_dict)
+
+ return storage_options, reader_kwargs
+
+
+def _get_storage_dictionary_options(reader_kwargs):
+ storage_opt_dict = {}
+ for k, v in reader_kwargs.items():
+ if isinstance(v, dict):
+ storage_opt_dict[k] = v.pop('storage_options', None)
+
+ return storage_opt_dict
+
+
+def _merge_storage_options(storage_options, storage_opt_dict):
+ if storage_opt_dict:
+ if storage_options:
+ storage_opt_dict['storage_options'] = storage_options
+ storage_options = storage_opt_dict
+
+ return storage_options
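A sketch of the separation performed above (the reader name and options are
made up)::

    from satpy.utils import get_storage_options_from_reader_kwargs

    reader_kwargs = {
        "storage_options": {"anon": True},                    # global options
        "some_reader": {"storage_options": {"anon": False}},  # per-reader options
    }
    storage_options, remaining_kwargs = get_storage_options_from_reader_kwargs(reader_kwargs)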
diff --git a/satpy/writers/__init__.py b/satpy/writers/__init__.py
index 560944a377..aa50161046 100644
--- a/satpy/writers/__init__.py
+++ b/satpy/writers/__init__.py
@@ -23,27 +23,22 @@
import logging
import os
import warnings
+from typing import Optional
import dask.array as da
import numpy as np
import xarray as xr
import yaml
+from trollimage.xrimage import XRImage
+from trollsift import parser
+from yaml import UnsafeLoader
-try:
- from yaml import UnsafeLoader
-except ImportError:
- from yaml import Loader as UnsafeLoader
-
-from satpy._config import config_search_paths, glob_config
-from satpy.utils import recursive_dict_update
from satpy import CHUNK_SIZE
+from satpy._config import config_search_paths, get_entry_points_config_dirs, glob_config
+from satpy.aux_download import DataDownloadMixin
from satpy.plugin_base import Plugin
from satpy.resample import get_area_def
-from satpy.aux_download import DataDownloadMixin
-
-from trollsift import parser
-
-from trollimage.xrimage import XRImage
+from satpy.utils import recursive_dict_update
LOG = logging.getLogger(__name__)
@@ -111,13 +106,17 @@ def configs_for_writer(writer=None):
# given a config filename or writer name
config_files = [w if w.endswith('.yaml') else w + '.yaml' for w in writer]
else:
- writer_configs = glob_config(os.path.join('writers', '*.yaml'))
+ paths = get_entry_points_config_dirs('satpy.writers')
+ writer_configs = glob_config(os.path.join('writers', '*.yaml'), search_dirs=paths)
config_files = set(writer_configs)
for config_file in config_files:
config_basename = os.path.basename(config_file)
+ paths = get_entry_points_config_dirs('satpy.writers')
writer_configs = config_search_paths(
- os.path.join("writers", config_basename))
+ os.path.join("writers", config_basename),
+ search_dirs=paths,
+ )
if not writer_configs:
LOG.warning("No writer configs found for '%s'", writer)
@@ -156,15 +155,14 @@ def _determine_mode(dataset):
if dataset.ndim == 2:
return "L"
- elif dataset.shape[0] == 2:
+ if dataset.shape[0] == 2:
return "LA"
- elif dataset.shape[0] == 3:
+ if dataset.shape[0] == 3:
return "RGB"
- elif dataset.shape[0] == 4:
+ if dataset.shape[0] == 4:
return "RGBA"
- else:
- raise RuntimeError("Can't determine 'mode' of dataset: %s" %
- str(dataset))
+ raise RuntimeError("Can't determine 'mode' of dataset: %s" %
+ str(dataset))
def _burn_overlay(img, image_metadata, area, cw_, overlays):
@@ -240,28 +238,7 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No
"overlays/decorations to non-RGB data.")
if overlays is None:
- overlays = dict()
- # fill with sensible defaults
- general_params = {'outline': color or (0, 0, 0),
- 'width': width or 0.5}
- for key, val in general_params.items():
- if val is not None:
- overlays.setdefault('coasts', {}).setdefault(key, val)
- overlays.setdefault('borders', {}).setdefault(key, val)
- if level_coast is None:
- level_coast = 1
- overlays.setdefault('coasts', {}).setdefault('level', level_coast)
- if level_borders is None:
- level_borders = 1
- overlays.setdefault('borders', {}).setdefault('level', level_borders)
-
- if grid is not None:
- if 'major_lonlat' in grid and grid['major_lonlat']:
- major_lonlat = grid.pop('major_lonlat')
- minor_lonlat = grid.pop('minor_lonlat', major_lonlat)
- grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat})
- for key, val in grid.items():
- overlays.setdefault('grid', {}).setdefault(key, val)
+ overlays = _create_overlays_dict(color, width, grid, level_coast, level_borders)
cw_ = ContourWriterAGG(coast_dir)
new_image = orig_img.apply_pil(_burn_overlay, res_mode,
@@ -270,6 +247,32 @@ def add_overlay(orig_img, area, coast_dir, color=None, width=None, resolution=No
return new_image
+def _create_overlays_dict(color, width, grid, level_coast, level_borders):
+ """Fill in the overlays dict."""
+ overlays = dict()
+ # fill with sensible defaults
+ general_params = {'outline': color or (0, 0, 0),
+ 'width': width or 0.5}
+ for key, val in general_params.items():
+ if val is not None:
+ overlays.setdefault('coasts', {}).setdefault(key, val)
+ overlays.setdefault('borders', {}).setdefault(key, val)
+ if level_coast is None:
+ level_coast = 1
+ overlays.setdefault('coasts', {}).setdefault('level', level_coast)
+ if level_borders is None:
+ level_borders = 1
+ overlays.setdefault('borders', {}).setdefault('level', level_borders)
+ if grid is not None:
+ if 'major_lonlat' in grid and grid['major_lonlat']:
+ major_lonlat = grid.pop('major_lonlat')
+ minor_lonlat = grid.pop('minor_lonlat', major_lonlat)
+ grid.update({'Dlonlat': major_lonlat, 'dlonlat': minor_lonlat})
+ for key, val in grid.items():
+ overlays.setdefault('grid', {}).setdefault(key, val)
+ return overlays
+
+
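The defaults filled in by the helper above when called with all-``None``
arguments (a sketch, not part of the patch)::

    overlays = _create_overlays_dict(color=None, width=None, grid=None,
                                     level_coast=None, level_borders=None)
    # -> {'coasts':  {'outline': (0, 0, 0), 'width': 0.5, 'level': 1},
    #     'borders': {'outline': (0, 0, 0), 'width': 0.5, 'level': 1}}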
def add_text(orig, dc, img, text):
"""Add text to an image using the pydecorate package.
@@ -400,7 +403,7 @@ def get_enhanced_image(dataset, enhance=None, overlay=None, decorate=None,
dataset (xarray.DataArray): Data to be enhanced and converted to an image.
enhance (bool or Enhancer): Whether to automatically enhance
data to be more visually useful and to fit inside the file
- format being saved to. By default this will default to using
+ format being saved to. By default, this will use
the enhancement configuration files found using the default
:class:`~satpy.writers.Enhancer` class. This can be set to
`False` so that no enhancments are performed. This can also
@@ -480,8 +483,7 @@ def to_image(dataset):
dataset = dataset.squeeze()
if dataset.ndim < 2:
raise ValueError("Need at least a 2D array to make an image.")
- else:
- return XRImage(dataset)
+ return XRImage(dataset)
def split_results(results):
@@ -647,7 +649,7 @@ def get_filename(self, **kwargs):
dirname = os.path.dirname(output_filename)
if dirname and not os.path.isdir(dirname):
LOG.info("Creating output directory: {}".format(dirname))
- os.makedirs(dirname)
+ os.makedirs(dirname, exist_ok=True)
return output_filename
def save_datasets(self, datasets, compute=True, **kwargs):
@@ -660,7 +662,7 @@ def save_datasets(self, datasets, compute=True, **kwargs):
Args:
datasets (iterable): Iterable of `xarray.DataArray` objects to
save using this writer.
- compute (bool): If `True` (default), compute all of the saves to
+ compute (bool): If `True` (default), compute all the saves to
disk. If `False` then the return value is either
a :doc:`dask:delayed` object or two lists to
be passed to a :func:`dask.array.store` call.
@@ -670,7 +672,7 @@ def save_datasets(self, datasets, compute=True, **kwargs):
Returns:
Value returned depends on `compute` keyword argument. If
- `compute` is `True` the value is the result of a either a
+ `compute` is `True` the value is the result of either a
:func:`dask.array.store` operation or a :doc:`dask:delayed`
compute, typically this is `None`. If `compute` is `False` then
the result is either a :doc:`dask:delayed` object that can be
@@ -723,7 +725,7 @@ def save_dataset(self, dataset, filename=None, fill_value=None,
If `compute` is `False` then the returned value is either a
:doc:`dask:delayed` object that can be computed using
`delayed.compute()` or a tuple of (source, target) that should be
- passed to :func:`dask.array.store`. If target is provided the the
+ passed to :func:`dask.array.store`. If target is provided the
caller is responsible for calling `target.close()` if the target
has this method.
@@ -758,7 +760,7 @@ def __init__(self, name=None, filename=None, base_dir=None, enhance=None, **kwar
Base destination directories for all created files.
enhance (bool or Enhancer): Whether to automatically enhance
data to be more visually useful and to fit inside the file
- format being saved to. By default this will default to using
+ format being saved to. By default, this will use
the enhancement configuration files found using the default
:class:`~satpy.writers.Enhancer` class. This can be set to
`False` so that no enhancments are performed. This can also
@@ -810,7 +812,13 @@ def save_dataset(self, dataset, filename=None, fill_value=None,
decorate=decorate, fill_value=fill_value)
return self.save_image(img, filename=filename, compute=compute, fill_value=fill_value, **kwargs)
- def save_image(self, img, filename=None, compute=True, **kwargs):
+ def save_image(
+ self,
+ img: XRImage,
+ filename: Optional[str] = None,
+ compute: bool = True,
+ **kwargs
+ ):
"""Save Image object to a given ``filename``.
Args:
@@ -895,7 +903,7 @@ def __init__(self, decision_dicts, match_keys, multival_keys=None):
decision_dicts (dict): Dictionary of dictionaries. Each
sub-dictionary contains key/value pairs that can be
matched from the `find_match` method. Sub-dictionaries
- can include additional keys outside of the ``match_keys``
+ can include additional keys outside the ``match_keys``
provided to act as the "result" of a query. The keys of
the root dict are arbitrary.
match_keys (list): Keys of the provided dictionary to use for
@@ -1015,9 +1023,10 @@ def find_match(self, **query_dict):
"""
try:
match = self._find_match(self._tree, self._match_keys, query_dict)
- except (KeyError, IndexError, ValueError):
+ except (KeyError, IndexError, ValueError, TypeError):
LOG.debug("Match exception:", exc_info=True)
LOG.error("Error when finding matching decision section")
+ match = None
if match is None:
# only possible if no default section was provided
@@ -1033,10 +1042,12 @@ def __init__(self, *decision_dicts, **kwargs):
"""Init the decision tree."""
match_keys = kwargs.pop("match_keys",
("name",
+ "reader",
"platform_name",
"sensor",
"standard_name",
- "units",))
+ "units",
+ ))
self.prefix = kwargs.pop("config_section", "enhancements")
multival_keys = kwargs.pop("multival_keys", ["sensor"])
super(EnhancementDecisionTree, self).__init__(
@@ -1057,6 +1068,7 @@ def add_config_to_tree(self, *decision_dict):
if not enhancement_section:
LOG.debug("Config '{}' has no '{}' section or it is empty".format(config_file, self.prefix))
continue
+ LOG.debug(f"Adding enhancement configuration from file: {config_file}")
conf = recursive_dict_update(conf, enhancement_section)
elif isinstance(config_file, dict):
conf = recursive_dict_update(conf, config_file)
@@ -1095,7 +1107,8 @@ def __init__(self, enhancement_config_file=None):
# it wasn't specified in the config or in the kwargs, we should
# provide a default
config_fn = os.path.join("enhancements", "generic.yaml")
- self.enhancement_config_file = config_search_paths(config_fn)
+ paths = get_entry_points_config_dirs('satpy.enhancements')
+ self.enhancement_config_file = config_search_paths(config_fn, search_dirs=paths)
if not self.enhancement_config_file:
# They don't want any automatic enhancements
@@ -1114,9 +1127,10 @@ def get_sensor_enhancement_config(self, sensor):
# one single sensor
sensor = [sensor]
+ paths = get_entry_points_config_dirs('satpy.enhancements')
for sensor_name in sensor:
config_fn = os.path.join("enhancements", sensor_name + ".yaml")
- config_files = config_search_paths(config_fn)
+ config_files = config_search_paths(config_fn, search_dirs=paths)
# Note: Enhancement configuration files can't overwrite individual
# options, only entire sections are overwritten
for config_file in config_files:
@@ -1138,11 +1152,11 @@ def apply(self, img, **info):
"""Apply the enhancements."""
enh_kwargs = self.enhancement_tree.find_match(**info)
- LOG.debug("Enhancement configuration options: %s" %
- (str(enh_kwargs['operations']), ))
+ backup_id = f""
+ data_id = info.get("_satpy_id", backup_id)
+ LOG.debug(f"Data for {data_id} will be enhanced with options:\n\t{enh_kwargs['operations']}")
for operation in enh_kwargs['operations']:
fun = operation['method']
args = operation.get('args', [])
kwargs = operation.get('kwargs', {})
fun(img, *args, **kwargs)
- # img.enhance(**enh_kwargs)
diff --git a/satpy/writers/awips_tiled.py b/satpy/writers/awips_tiled.py
index daf0fc84fa..6daabbdc67 100644
--- a/satpy/writers/awips_tiled.py
+++ b/satpy/writers/awips_tiled.py
@@ -213,24 +213,24 @@
lettered tile locations.
"""
-import os
import logging
+import os
import string
-import warnings
import sys
-from datetime import datetime, timedelta
+import warnings
from collections import namedtuple
+from datetime import datetime, timedelta
-from satpy.writers import Writer, DecisionTree, Enhancer, get_enhanced_image
-from satpy import __version__
-from pyresample.geometry import AreaDefinition
-from trollsift.parser import StringFormatter, Parser
-
-import numpy as np
-from pyproj import Proj
import dask
import dask.array as da
+import numpy as np
import xarray as xr
+from pyproj import CRS, Proj, Transformer
+from pyresample.geometry import AreaDefinition
+from trollsift.parser import Parser, StringFormatter
+
+from satpy import __version__
+from satpy.writers import DecisionTree, Enhancer, Writer, get_enhanced_image
LOG = logging.getLogger(__name__)
DEFAULT_OUTPUT_PATTERN = '{source_name}_AII_{platform_name}_{sensor}_' \
@@ -303,7 +303,7 @@ def _get_tile_properties(self, tile_shape, tile_count):
else:
raise ValueError("Either 'tile_count' or 'tile_shape' must be provided")
- # number of pixels per each tile
+ # number of pixels in each tile (rows, cols)
self.tile_shape = tile_shape
# number of tiles in each direction (rows, columns)
self.tile_count = tile_count
@@ -342,9 +342,7 @@ def _get_xy_arrays(self):
new_extents,
)
- x, y = imaginary_grid_def.get_proj_coords()
- x = x[0].squeeze() # all rows should have the same coordinates
- y = y[:, 0].squeeze() # all columns should have the same coordinates
+ x, y = imaginary_grid_def.get_proj_vectors()
return x, y
def _get_xy_scaling_parameters(self):
@@ -352,7 +350,7 @@ def _get_xy_scaling_parameters(self):
gd = self.area_definition
bx = self.x.min()
mx = gd.pixel_size_x
- by = self.y.min()
+ by = self.y.max()
my = -abs(gd.pixel_size_y)
return mx, bx, my, by
@@ -424,10 +422,20 @@ def __call__(self):
class LetteredTileGenerator(NumberedTileGenerator):
"""Helper class to generate per-tile metadata for lettered tiles."""
- def __init__(self, area_definition, extents,
+ def __init__(self, area_definition, extents, sector_crs,
cell_size=(2000000, 2000000),
num_subtiles=None, use_sector_reference=False):
- """Initialize tile information for later generation."""
+ """Initialize tile information for later generation.
+
+ Args:
+ area_definition (AreaDefinition): Area of the data being saved.
+ extents (tuple): Four element tuple of the configured lettered
+ area.
+ sector_crs (pyproj.CRS): CRS of the configured lettered sector
+ area.
+ cell_size (tuple): Two element tuple of resolution of each tile
+ in sector projection units (y, x).
+ """
# (row subtiles, col subtiles)
self.num_subtiles = num_subtiles or (2, 2)
self.cell_size = cell_size # (row tile height, col tile width)
@@ -435,7 +443,8 @@ def __init__(self, area_definition, extents,
self.ll_extents = extents[:2] # (x min, y min)
self.ur_extents = extents[2:] # (x max, y max)
self.use_sector_reference = use_sector_reference
- super(LetteredTileGenerator, self).__init__(area_definition)
+ self._transformer = Transformer.from_crs(sector_crs, area_definition.crs)
+ super().__init__(area_definition)
def _get_tile_properties(self, tile_shape, tile_count):
"""Calculate tile information for this particular sector/grid."""
@@ -447,8 +456,8 @@ def _get_tile_properties(self, tile_shape, tile_count):
ad = self.area_definition
x, y = ad.get_proj_vectors()
- ll_xy = self.ll_extents
- ur_xy = self.ur_extents
+ ll_xy = self._transformer.transform(*self.ll_extents)
+ ur_xy = self._transformer.transform(*self.ur_extents)
cw = abs(ad.pixel_size_x)
ch = abs(ad.pixel_size_y)
st = self.num_subtiles
@@ -751,7 +760,7 @@ def get_attr_value(self, attr_name, input_metadata, value=None, raw_key=None, ra
metadata of non-string types to be requested.
raw_value (Any): Static hardcoded value to set this attribute
to. Overrides all other options.
- prefix (bool): Prefix to use when `value` and `raw_key` are
+ prefix (str): Prefix to use when `value` and `raw_key` are
both ``None``. Default is ``"_"``. This will be used to find
custom attribute handlers in subclasses. For example, if
`value` and `raw_key` are both ``None`` and `attr_name`
@@ -819,7 +828,9 @@ def _render_variable_encoding(self, var_config, input_data_arr):
# determine fill value and
if 'encoding' in var_config:
new_encoding.update(var_config['encoding'])
- new_encoding.setdefault('dtype', 'uint16')
+ if "dtype" not in new_encoding:
+ new_encoding['dtype'] = 'int16'
+ new_encoding['_Unsigned'] = 'true'
return new_encoding
def _render_variable(self, data_arr):
@@ -864,7 +875,6 @@ def render(self, dataset_or_data_arrays, shared_attrs=None):
for data_arr in data_arrays:
new_var_name, new_data_arr = self._render_variable(data_arr)
new_ds[new_var_name] = new_data_arr
-
new_coords = self._render_coordinates(new_ds)
new_ds.coords.update(new_coords)
# use first data array as "representative" for global attributes
@@ -911,6 +921,12 @@ def _global_start_date_time(self, input_metadata):
def _global_awips_id(self, input_metadata):
return "AWIPS_" + input_metadata['name']
+ def _global_physical_element(self, input_metadata):
+ var_config = self._var_tree.find_match(**input_metadata)
+ attr_config = {"physical_element": var_config["attributes"]["physical_element"]}
+ result = self._render_attrs(attr_config, input_metadata, prefix="_data_")
+ return result["physical_element"]
+
def _global_production_location(self, input_metadata):
"""Get default global production_location attribute."""
del input_metadata
@@ -964,6 +980,7 @@ def _render_variable_encoding(self, var_config, input_data_arr):
new_encoding['scale_factor'] = sf
new_encoding['add_offset'] = ao
new_encoding['_FillValue'] = fill
+ new_encoding['coordinates'] = ' '.join([ele for ele in input_data_arr.dims])
return new_encoding
def _get_projection_attrs(self, area_def):
@@ -989,30 +1006,29 @@ def _get_projection_attrs(self, area_def):
def _set_xy_coords_attrs(self, new_ds, crs):
y_attrs = new_ds.coords['y'].attrs
if crs.is_geographic:
- if y_attrs.get('units') is None:
- y_attrs['units'] = 'degrees_north'
- if y_attrs.get('standard_name') is None:
- y_attrs['standard_name'] = 'latitude'
+ self._fill_units_and_standard_name(y_attrs, 'degrees_north', 'latitude')
else:
- if y_attrs.get('units') is None:
- y_attrs['units'] = 'meters'
- if y_attrs.get('standard_name') is None:
- y_attrs['standard_name'] = 'projection_y_coordinate'
+ self._fill_units_and_standard_name(y_attrs, 'meters', 'projection_y_coordinate')
y_attrs['axis'] = 'Y'
x_attrs = new_ds.coords['x'].attrs
if crs.is_geographic:
- if x_attrs.get('units') is None:
- x_attrs['units'] = 'degrees_east'
- if x_attrs.get('standard_name') is None:
- x_attrs['standard_name'] = 'longitude'
+ self._fill_units_and_standard_name(x_attrs, 'degrees_east', 'longitude')
else:
- if x_attrs.get('units') is None:
- x_attrs['units'] = 'meters'
- if x_attrs.get('standard_name') is None:
- x_attrs['standard_name'] = 'projection_x_coordinate'
+ self._fill_units_and_standard_name(x_attrs, 'meters', 'projection_x_coordinate')
x_attrs['axis'] = 'X'
+ @staticmethod
+ def _fill_units_and_standard_name(attrs, units, standard_name):
+ """Fill in units and standard_name if not set in `attrs`."""
+ if attrs.get('units') is None:
+ attrs['units'] = units
+ if attrs['units'] in ('meter', 'metre'):
+ # AWIPS doesn't like 'meter'
+ attrs['units'] = 'meters'
+ if attrs.get('standard_name') is None:
+ attrs['standard_name'] = standard_name
+
def apply_area_def(self, new_ds, area_def):
"""Apply information we can gather from the AreaDefinition."""
gmap_name, gmap_attrs, gmap_encoding = self._get_projection_attrs(area_def)
@@ -1060,7 +1076,7 @@ def apply_tile_info(self, new_ds, tile_info):
return new_ds
def _add_sector_id_global(self, new_ds, sector_id):
- if not self._template_dict.get('apply_sector_id_global'):
+ if not self._template_dict.get('add_sector_id_global'):
return
if sector_id is None:
@@ -1081,6 +1097,16 @@ def apply_misc_metadata(self, new_ds, sector_id=None, creator=None, creation_tim
new_ds.attrs['creation_time'] = creation_time.strftime('%Y-%m-%dT%H:%M:%S')
return new_ds
+ def _render_variable_attributes(self, var_config, input_metadata):
+ attrs = super()._render_variable_attributes(var_config, input_metadata)
+ # AWIPS validation checks
+ if len(attrs.get("units", "")) > 26:
+ warnings.warn(
+ "AWIPS 'units' must be limited to a maximum of 26 characters. "
+ "Units '{}' is too long and will be truncated.".format(attrs["units"]))
+ attrs["units"] = attrs["units"][:26]
+ return attrs
+
def render(self, dataset_or_data_arrays, area_def,
tile_info, sector_id, creator=None, creation_time=None,
shared_attrs=None, extra_global_attrs=None):
@@ -1263,6 +1289,7 @@ def separate_init_kwargs(cls, kwargs):
def _fill_sector_info(self):
"""Convert sector extents if needed."""
for sector_info in self.awips_sectors.values():
+ sector_info['projection'] = CRS.from_user_input(sector_info['projection'])
p = Proj(sector_info['projection'])
if 'lower_left_xy' in sector_info:
sector_info['lower_left_lonlat'] = p(*sector_info['lower_left_xy'], inverse=True)
@@ -1299,6 +1326,7 @@ def _get_tile_generator(self, area_def, lettered_grid, sector_id,
tile_gen = LetteredTileGenerator(
area_def,
sector_info['lower_left_xy'] + sector_info['upper_right_xy'],
+ sector_crs=sector_info['projection'],
cell_size=sector_info['resolution'],
num_subtiles=num_subtiles,
use_sector_reference=use_sector_reference,
@@ -1370,7 +1398,7 @@ def _slice_and_update_coords(self, tile_info, data_arrays):
new_y = xr.DataArray(tile_info.y, dims=('y',))
if 'y' in data_arrays[0].coords:
old_y = data_arrays[0].coords['y']
- new_x.attrs.update(old_y.attrs)
+ new_y.attrs.update(old_y.attrs)
new_y.encoding = old_y.encoding
for data_arr in data_arrays:
@@ -1522,11 +1550,11 @@ def save_datasets(self, datasets, sector_id=None,
grid's pixels. By default this is False meaning that the
grid's tiles will be shifted to align with the data locations.
If True, the data is shifted. At most the data will be shifted
- by 0.5 pixels. See :mod:`satpy.writers.scmi` for more
+ by 0.5 pixels. See :mod:`satpy.writers.awips_tiled` for more
information.
template (str or dict): Name of the template configured in the
writer YAML file. This can also be a dictionary with a full
- template configuration. See the :mod:`satpy.writers.scmi`
+ template configuration. See the :mod:`satpy.writers.awips_tiled`
documentation for more information on templates. Defaults to
the 'polar' builtin template.
check_categories (bool): Whether category and flag products should
@@ -1691,7 +1719,6 @@ def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'):
img.save("test.png")
- from pyresample.utils import proj4_str_to_dict
new_extents = (
ll_extent[0],
ur_extent[1] - 1001. * meters_ppy,
@@ -1702,7 +1729,7 @@ def _create_debug_array(sector_info, num_subtiles, font_path='Verdana.ttf'):
'debug_grid',
'debug_grid',
'debug_grid',
- proj4_str_to_dict(sector_info['projection']),
+ sector_info['projection'],
1000,
1000,
new_extents
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index 8d388ae91c..ca0e2055fb 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -31,11 +31,17 @@
>>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'], filename='seviri_test.nc',
exclude_attrs=['raw_metadata'])
-* You can select the netCDF backend using the ``engine`` keyword argument. Default is ``h5netcdf``.
+* You can select the netCDF backend using the ``engine`` keyword argument. If `None`, it follows
+  :meth:`~xarray.Dataset.to_netcdf` engine choices with a preference for 'netcdf4' (see the example below).
* For datasets with area definition you can exclude lat/lon coordinates by setting ``include_lonlats=False``.
-* By default the dataset name is prepended to non-dimensional coordinates such as scanline timestamps. This ensures
- maximum consistency, i.e. the netCDF variable names are independent of the number/set of datasets to be written.
- If a non-dimensional coordinate is identical for
+  If the area has a projected CRS, units are assumed to be in metres. If the
+ area has a geographic CRS, units are assumed to be in degrees. The writer
+ does not verify that the CRS is supported by the CF conventions. One
+ commonly used projected CRS not supported by the CF conventions is the
+ equirectangular projection, such as EPSG 4087.
+* By default non-dimensional coordinates (such as scanline timestamps) are prefixed with the corresponding
+ dataset name. This is because they are likely to be different for each dataset. If a non-dimensional
+ coordinate is identical for all datasets, the prefix can be removed by setting ``pretty=True``.
* Some dataset names start with a digit, like AVHRR channels 1, 2, 3a, 3b, 4 and 5. This doesn't comply with the
  CF conventions (https://cfconventions.org/Data/cf-conventions/cf-conventions-1.7/build/ch02s03.html), so these
  channels are prefixed with `CHANNEL_` by default. This can be controlled with the `numeric_name_prefix` argument
  of `save_datasets`.
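+
+For example, to select the backend explicitly (a sketch; ``engine`` is passed on to
+:meth:`~xarray.Dataset.to_netcdf`):
+
+    >>> scn.save_datasets(writer='cf', datasets=['VIS006', 'IR_108'],
+    ...                   filename='seviri_test.nc', engine='netcdf4')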
@@ -56,6 +62,36 @@
Note that the resulting file will not be fully CF compliant.
+Dataset Encoding
+~~~~~~~~~~~~~~~~
+
+Dataset encoding can be specified in two ways:
+
+1) Via the ``encoding`` keyword argument of ``save_datasets``:
+
+ >>> my_encoding = {
+ ... 'my_dataset_1': {
+ ... 'zlib': True,
+ ... 'complevel': 9,
+ ... 'scale_factor': 0.01,
+ ... 'add_offset': 100,
+ ... 'dtype': np.int16
+ ... },
+ ... 'my_dataset_2': {
+ ... 'zlib': False
+ ... }
+ ... }
+ >>> scn.save_datasets(writer='cf', filename='encoding_test.nc', encoding=my_encoding)
+
+
+2) Via the ``encoding`` attribute of the datasets in a scene. For example
+
+ >>> scn['my_dataset'].encoding = {'zlib': False}
+ >>> scn.save_datasets(writer='cf', filename='encoding_test.nc')
+
+See the `xarray encoding documentation`_ for all encoding options.
+
+
Attribute Encoding
~~~~~~~~~~~~~~~~~~
@@ -99,6 +135,8 @@
.. _CF-compliant: http://cfconventions.org/
+.. _xarray encoding documentation:
+ http://xarray.pydata.org/en/stable/user-guide/io.html?highlight=encoding#writing-encoded-data
"""
import copy
@@ -122,6 +160,21 @@
EPOCH = u"seconds since 1970-01-01 00:00:00"
+# Check availability of either netCDF4 or h5netcdf package
+try:
+ import netCDF4
+except ImportError:
+ netCDF4 = None
+
+try:
+ import h5netcdf
+except ImportError:
+ h5netcdf = None
+
+# Ensure that either netCDF4 or h5netcdf is available to avoid silent failure
+if netCDF4 is None and h5netcdf is None:
+ raise ImportError('Ensure that the netCDF4 or h5netcdf package is installed.')
+
# Numpy datatypes compatible with all netCDF4 backends. ``np.unicode_`` is
# excluded because h5py (and thus h5netcdf) has problems with unicode, see
# https://github.com/h5py/h5py/issues/624.
@@ -519,6 +572,45 @@ def _handle_dataarray_name(original_name, numeric_name_prefix):
return original_name, name
+def _get_compression(compression):
+    warnings.warn("The default behaviour of the CF writer will soon change to no longer compress data.",
+ FutureWarning)
+ if compression is None:
+ compression = {'zlib': True}
+ else:
+ warnings.warn("The `compression` keyword will soon be deprecated. Please use the `encoding` of the "
+ "DataArrays to tune compression from now on.", FutureWarning)
+ return compression
+
+
+def _set_history(root):
+ _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow())
+ if 'history' in root.attrs:
+ if isinstance(root.attrs['history'], list):
+ root.attrs['history'] = ''.join(root.attrs['history'])
+ root.attrs['history'] += '\n' + _history_create
+ else:
+ root.attrs['history'] = _history_create
+
+
+def _get_groups(groups, datasets, root):
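+    """Assign the datasets to the given groups.
+
+    A sketch of the user-facing ``groups`` argument this helper consumes
+    (the group and dataset names are hypothetical):
+    ``groups={'visir': ['VIS006', 'IR_108'], 'hrv': ['HRV']}``.
+    """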
+ if groups is None:
+ # Groups are not CF-1.7 compliant
+ if 'Conventions' not in root.attrs:
+ root.attrs['Conventions'] = CF_VERSION
+ # Write all datasets to the file root without creating a group
+ groups_ = {None: datasets}
+ else:
+ # User specified a group assignment using dataset names. Collect the corresponding datasets.
+ groups_ = defaultdict(list)
+ for dataset in datasets:
+ for group_name, group_members in groups.items():
+ if dataset.attrs['name'] in group_members:
+ groups_[group_name].append(dataset)
+ break
+ return groups_
+
+
class CFWriter(Writer):
"""Writer producing NetCDF/CF compatible datasets."""
@@ -551,10 +643,10 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compr
original_name, name = _handle_dataarray_name(name, numeric_name_prefix)
new_data = new_data.rename(name)
- # Remove _satpy* attributes
- satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
- for satpy_attr in satpy_attrs:
- new_data.attrs.pop(satpy_attr)
+ CFWriter._remove_satpy_attributes(new_data)
+
+ new_data = CFWriter._encode_time(new_data, epoch)
+ new_data = CFWriter._encode_coords(new_data)
# Remove area as well as user-defined attributes
for key in ['area'] + exclude_attrs:
@@ -566,33 +658,11 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compr
new_data.attrs['ancillary_variables'] = ' '.join(anc)
# TODO: make this a grid mapping or lon/lats
# new_data.attrs['area'] = str(new_data.attrs.get('area'))
- for key, val in new_data.attrs.copy().items():
- if val is None:
- new_data.attrs.pop(key)
- if key == 'ancillary_variables' and val == []:
- new_data.attrs.pop(key)
- new_data.attrs.pop('_last_resampler', None)
+ CFWriter._cleanup_attrs(new_data)
+
if compression is not None:
new_data.encoding.update(compression)
- if 'time' in new_data.coords:
- new_data['time'].encoding['units'] = epoch
- new_data['time'].attrs['standard_name'] = 'time'
- new_data['time'].attrs.pop('bounds', None)
- if 'time' not in new_data.dims:
- new_data = new_data.expand_dims('time')
-
- if 'x' in new_data.coords:
- new_data['x'].attrs['standard_name'] = 'projection_x_coordinate'
- new_data['x'].attrs['units'] = 'm'
-
- if 'y' in new_data.coords:
- new_data['y'].attrs['standard_name'] = 'projection_y_coordinate'
- new_data['y'].attrs['units'] = 'm'
-
- if 'crs' in new_data.coords:
- new_data = new_data.drop_vars('crs')
-
if 'long_name' not in new_data.attrs and 'standard_name' not in new_data.attrs:
new_data.attrs['long_name'] = new_data.name
if 'prerequisites' in new_data.attrs:
@@ -610,6 +680,108 @@ def da2cf(dataarray, epoch=EPOCH, flatten_attrs=False, exclude_attrs=None, compr
return new_data
+ @staticmethod
+ def _cleanup_attrs(new_data):
+ for key, val in new_data.attrs.copy().items():
+ if val is None:
+ new_data.attrs.pop(key)
+ if key == 'ancillary_variables' and val == []:
+ new_data.attrs.pop(key)
+
+ @staticmethod
+ def _encode_coords(new_data):
+ """Encode coordinates."""
+ if not new_data.coords.keys() & {"x", "y", "crs"}:
+            # there are no x, y or crs coordinates to encode
+ return new_data
+ is_projected = CFWriter._is_projected(new_data)
+ if is_projected:
+ new_data = CFWriter._encode_xy_coords_projected(new_data)
+ else:
+ new_data = CFWriter._encode_xy_coords_geographic(new_data)
+ if 'crs' in new_data.coords:
+ new_data = new_data.drop_vars('crs')
+ return new_data
+
+ @staticmethod
+ def _is_projected(new_data):
+ """Guess whether data are projected or not."""
+ crs = CFWriter._try_to_get_crs(new_data)
+ if crs:
+ return crs.is_projected
+ units = CFWriter._try_get_units_from_coords(new_data)
+ if units:
+ if units.endswith("m"):
+ return True
+ if units.startswith("degrees"):
+ return False
+ logger.warning("Failed to tell if data are projected. Assuming yes.")
+ return True
+
+ @staticmethod
+ def _try_to_get_crs(new_data):
+ """Try to get a CRS from attributes."""
+ if "area" in new_data.attrs:
+ if isinstance(new_data.attrs["area"], AreaDefinition):
+ return new_data.attrs["area"].crs
+ # at least one test case passes an area of type str
+ logger.warning(
+ f"Could not tell CRS from area of type {type(new_data.attrs['area']).__name__:s}. "
+ "Assuming projected CRS.")
+ if "crs" in new_data.coords:
+ return new_data.coords["crs"].item()
+
+ @staticmethod
+ def _try_get_units_from_coords(new_data):
+ for c in "xy":
+ if "units" in new_data.coords[c].attrs:
+ return new_data.coords[c].attrs["units"]
+
+ @staticmethod
+ def _encode_xy_coords_projected(new_data):
+ """Encode coordinates, assuming projected CRS."""
+ if 'x' in new_data.coords:
+ new_data['x'].attrs['standard_name'] = 'projection_x_coordinate'
+ new_data['x'].attrs['units'] = 'm'
+ if 'y' in new_data.coords:
+ new_data['y'].attrs['standard_name'] = 'projection_y_coordinate'
+ new_data['y'].attrs['units'] = 'm'
+ return new_data
+
+ @staticmethod
+ def _encode_xy_coords_geographic(new_data):
+ """Encode coordinates, assuming geographic CRS."""
+ if 'x' in new_data.coords:
+ new_data['x'].attrs['standard_name'] = 'longitude'
+ new_data['x'].attrs['units'] = 'degrees_east'
+ if 'y' in new_data.coords:
+ new_data['y'].attrs['standard_name'] = 'latitude'
+ new_data['y'].attrs['units'] = 'degrees_north'
+ return new_data
+
+ @staticmethod
+ def _encode_time(new_data, epoch):
+ if 'time' in new_data.coords:
+ new_data['time'].encoding['units'] = epoch
+ new_data['time'].attrs['standard_name'] = 'time'
+ new_data['time'].attrs.pop('bounds', None)
+ new_data = CFWriter._add_time_dimension(new_data)
+ return new_data
+
+ @staticmethod
+ def _add_time_dimension(new_data):
+ if 'time' not in new_data.dims and new_data["time"].size not in new_data.shape:
+ new_data = new_data.expand_dims('time')
+ return new_data
+
+ @staticmethod
+ def _remove_satpy_attributes(new_data):
+ # Remove _satpy* attributes
+ satpy_attrs = [key for key in new_data.attrs if key.startswith('_satpy')]
+ for satpy_attr in satpy_attrs:
+ new_data.attrs.pop(satpy_attr)
+ new_data.attrs.pop('_last_resampler', None)
+
@staticmethod
def update_encoding(dataset, to_netcdf_kwargs):
"""Update encoding info (deprecated)."""
@@ -695,6 +867,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
Compression to use on the datasets before saving, for example {'zlib': True, 'complevel': 9}.
This is in turn passed to xarray's `to_netcdf` method:
http://xarray.pydata.org/en/stable/generated/xarray.Dataset.to_netcdf.html for more possibilities.
+            (This parameter is now being deprecated; please use the DataArray's `encoding` from now on.)
include_orig_name (bool):
Include the original dataset name as a variable attribute in the final netCDF file.
numeric_name_prefix (str):
@@ -702,9 +875,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
"""
logger.info('Saving datasets to NetCDF4/CF.')
-
- if compression is None:
- compression = {'zlib': True}
+ compression = _get_compression(compression)
# Write global attributes to file root (creates the file)
filename = filename or self.get_filename(**datasets[0].attrs)
@@ -714,13 +885,8 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
if flatten_attrs:
header_attrs = flatten_dict(header_attrs)
root.attrs = encode_attrs_nc(header_attrs)
- _history_create = 'Created by pytroll/satpy on {}'.format(datetime.utcnow())
- if 'history' in root.attrs:
- if isinstance(root.attrs['history'], list):
- root.attrs['history'] = ''.join(root.attrs['history'])
- root.attrs['history'] += '\n' + _history_create
- else:
- root.attrs['history'] = _history_create
+
+ _set_history(root)
# Remove satpy-specific kwargs
to_netcdf_kwargs = copy.deepcopy(to_netcdf_kwargs) # may contain dictionaries (encoding)
@@ -732,20 +898,7 @@ def save_datasets(self, datasets, filename=None, groups=None, header_attrs=None,
init_nc_kwargs.pop('encoding', None) # No variables to be encoded at this point
init_nc_kwargs.pop('unlimited_dims', None)
- if groups is None:
- # Groups are not CF-1.7 compliant
- if 'Conventions' not in root.attrs:
- root.attrs['Conventions'] = CF_VERSION
- # Write all datasets to the file root without creating a group
- groups_ = {None: datasets}
- else:
- # User specified a group assignment using dataset names. Collect the corresponding datasets.
- groups_ = defaultdict(list)
- for dataset in datasets:
- for group_name, group_members in groups.items():
- if dataset.attrs['name'] in group_members:
- groups_[group_name].append(dataset)
- break
+ groups_ = _get_groups(groups, datasets, root)
written = [root.to_netcdf(filename, engine=engine, mode='w', **init_nc_kwargs)]
diff --git a/satpy/writers/geotiff.py b/satpy/writers/geotiff.py
index 4cb8894b26..d5f3d2d211 100644
--- a/satpy/writers/geotiff.py
+++ b/satpy/writers/geotiff.py
@@ -16,13 +16,21 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""GeoTIFF writer objects for creating GeoTIFF files from `DataArray` objects."""
+from __future__ import annotations
import logging
+from typing import Any, Optional, Union
+
import numpy as np
-from satpy.writers import ImageWriter
+
# make sure we have rasterio even though we don't use it until trollimage
# saves the image
import rasterio # noqa
+from trollimage.colormap import Colormap
+from trollimage.xrimage import XRImage
+
+from satpy._compat import DTypeLike
+from satpy.writers import ImageWriter
LOG = logging.getLogger(__name__)
@@ -50,6 +58,10 @@ class GeoTIFFWriter(ImageWriter):
>>> scn.save_dataset(dataset_name, writer='geotiff',
... tags={'offset': 291.8, 'scale': -0.35})
+ Images are tiled by default. To create striped TIFF files ``tiled=False`` can be specified:
+
+ >>> scn.save_datasets(writer='geotiff', tiled=False)
+
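+    To save a Cloud Optimized GeoTIFF (COG) instead of a plain GeoTIFF, the
+    ``driver`` keyword can be specified (a sketch; requires a GDAL build with the
+    COG driver):
+
+    >>> scn.save_datasets(writer='geotiff', driver='COG')
+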
For performance tips on creating geotiffs quickly and making them smaller
see the :ref:`faq`.
@@ -76,7 +88,25 @@ class GeoTIFFWriter(ImageWriter):
"profile",
"bigtiff",
"pixeltype",
- "copy_src_overviews",)
+ "copy_src_overviews",
+ # COG driver options (different from GTiff above)
+ "blocksize",
+ "resampling",
+ "quality",
+ "level",
+ "overview_resampling",
+ "warp_resampling",
+ "overview_compress",
+ "overview_quality",
+ "overview_predictor",
+ "tiling_scheme",
+ "zoom_level_strategy",
+ "target_srs",
+ "res",
+ "extent",
+ "aligned_levels",
+ "add_alpha",
+ )
def __init__(self, dtype=None, tags=None, **kwargs):
"""Init the writer."""
@@ -107,16 +137,29 @@ def separate_init_kwargs(cls, kwargs):
return init_kwargs, kwargs
- def save_image(self, img, filename=None, dtype=None, fill_value=None,
- compute=True, keep_palette=False, cmap=None, tags=None,
- overviews=None, overviews_minsize=256,
- overviews_resampling=None, include_scale_offset=False,
- **kwargs):
+ def save_image(
+ self,
+ img: XRImage,
+ filename: Optional[str] = None,
+ compute: bool = True,
+ dtype: Optional[DTypeLike] = None,
+ fill_value: Optional[Union[int, float]] = None,
+ keep_palette: bool = False,
+ cmap: Optional[Colormap] = None,
+ tags: Optional[dict[str, Any]] = None,
+ overviews: Optional[list[int]] = None,
+ overviews_minsize: int = 256,
+ overviews_resampling: Optional[str] = None,
+ include_scale_offset: bool = False,
+ scale_offset_tags: Optional[tuple[str, str]] = None,
+ colormap_tag: Optional[str] = None,
+ driver: Optional[str] = None,
+ tiled: bool = True,
+ **kwargs
+ ):
"""Save the image to the given ``filename`` in geotiff_ format.
- Note for faster output and reduced memory usage the ``rasterio``
- library must be installed. This writer currently falls back to
- using ``gdal`` directly, but that will be deprecated in the future.
+ Note this writer requires the ``rasterio`` library to be installed.
Args:
img (xarray.DataArray): Data to save to geotiff.
@@ -124,14 +167,6 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
``filename`` passed during writer creation. Unlike the
creation ``filename`` keyword argument, this filename does not
get formatted with data attributes.
- dtype (numpy.dtype): Numpy data type to save the image as.
- Defaults to 8-bit unsigned integer (``np.uint8``) or the data
- type of the data to be saved if ``enhance=False``. If the
- ``dtype`` argument is provided during writer creation then
- that will be used as the default.
- fill_value (int or float): Value to use where data values are
- NaN/null. If this is specified in the writer configuration
- file that value will be used as the default.
compute (bool): Compute dask arrays and save the image
immediately. If ``False`` then the return value can be passed
to :func:`~satpy.writers.compute_writer_results` to do the
@@ -140,6 +175,14 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
them multiple times. Defaults to ``True`` in the writer by
itself, but is typically passed as ``False`` by callers where
calculations can be combined.
+ dtype (DTypeLike): Numpy data type to save the image as.
+ Defaults to 8-bit unsigned integer (``np.uint8``) or the data
+ type of the data to be saved if ``enhance=False``. If the
+ ``dtype`` argument is provided during writer creation then
+ that will be used as the default.
+ fill_value (float or int): Value to use where data values are
+ NaN/null. If this is specified in the writer configuration
+ file that value will be used as the default.
keep_palette (bool): Save palette/color table to geotiff.
To be used with images that were palettized with the
"palettize" enhancement. Setting this to ``True`` will cause
@@ -175,20 +218,32 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
provided. Common values include `nearest` (default),
`bilinear`, `average`, and many others. See the rasterio
documentation for more information.
- include_scale_offset (bool): Activate inclusion of scale and offset
- factors in the geotiff to allow retrieving original values from
- the pixel values. ``False`` by default.
+            scale_offset_tags (Tuple[str, str]): If set, include the scale and
+                offset in the GeoTIFF headers in the GDALMetaData tag. The value
+                of this argument should be a two-element tuple
+                ``(scale_label, offset_label)``, for example ``("scale",
+                "offset")``, indicating the labels to be used.
+ colormap_tag (Optional[str]): If set and the image being saved was
+ colorized or palettized then a comma-separated version of the
+ colormap is saved to a custom geotiff tag with the provided
+ name. See :meth:`trollimage.colormap.Colormap.to_csv` for more
+ information.
+ driver (Optional[str]): Name of GDAL driver to use to save the
+ geotiff. If not specified or None (default) the "GTiff" driver
+ is used. Another common option is "COG" for Cloud Optimized
+ GeoTIFF. See GDAL documentation for more information.
+ tiled (bool): For performance this defaults to ``True``.
+            Pass ``False`` to create striped TIFF files.
+ include_scale_offset (deprecated, bool): Deprecated.
+ Use ``scale_offset_tags=("scale", "offset")`` to include scale
+ and offset tags.
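+
+            For example, scale/offset labels and a colormap tag can be requested
+            together (a sketch; the tag names are user-chosen)::
+
+                scn.save_datasets(writer='geotiff',
+                                  scale_offset_tags=("scale", "offset"),
+                                  colormap_tag="colormap")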
.. _geotiff: http://trac.osgeo.org/geotiff/
"""
filename = filename or self.get_filename(**img.data.attrs)
- # Update global GDAL options with these specific ones
- gdal_options = self.gdal_options.copy()
- for k in kwargs:
- if k in self.GDAL_OPTIONS:
- gdal_options[k] = kwargs[k]
+ gdal_options = self._get_gdal_options(kwargs)
if fill_value is None:
# fall back to fill_value from configuration file
fill_value = self.info.get('fill_value')
@@ -196,7 +251,7 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
dtype = dtype if dtype is not None else self.dtype
if dtype is None and self.enhancer is not False:
dtype = np.uint8
- else:
+ elif dtype is None:
dtype = img.data.dtype.type
if "alpha" in kwargs:
@@ -219,11 +274,24 @@ def save_image(self, img, filename=None, dtype=None, fill_value=None,
if tags is None:
tags = {}
tags.update(self.tags)
- return img.save(filename, fformat='tif', fill_value=fill_value,
+
+ return img.save(filename, fformat='tif', driver=driver,
+ fill_value=fill_value,
dtype=dtype, compute=compute,
keep_palette=keep_palette, cmap=cmap,
tags=tags, include_scale_offset_tags=include_scale_offset,
+ scale_offset_tags=scale_offset_tags,
+ colormap_tag=colormap_tag,
overviews=overviews,
overviews_resampling=overviews_resampling,
overviews_minsize=overviews_minsize,
+ tiled=tiled,
**gdal_options)
+
+ def _get_gdal_options(self, kwargs):
+ # Update global GDAL options with these specific ones
+ gdal_options = self.gdal_options.copy()
+ for k in kwargs:
+ if k in self.GDAL_OPTIONS:
+ gdal_options[k] = kwargs[k]
+ return gdal_options
diff --git a/satpy/writers/mitiff.py b/satpy/writers/mitiff.py
index 438ab780b0..ef07417527 100644
--- a/satpy/writers/mitiff.py
+++ b/satpy/writers/mitiff.py
@@ -18,14 +18,12 @@
"""MITIFF writer objects for creating MITIFF files from `Dataset` objects."""
import logging
-import numpy as np
-
-from satpy.writers import ImageWriter
-
-from satpy.writers import get_enhanced_image
-from satpy.dataset import DataQuery, DataID
import dask
+import numpy as np
+
+from satpy.dataset import DataID, DataQuery
+from satpy.writers import ImageWriter, get_enhanced_image
IMAGEDESCRIPTION = 270
@@ -34,6 +32,22 @@
KELVIN_TO_CELSIUS = -273.15
+def _adjust_kwargs(dataset, kwargs):
+ if 'platform_name' not in kwargs:
+ kwargs['platform_name'] = dataset.attrs['platform_name']
+ if 'name' not in kwargs:
+ kwargs['name'] = dataset.attrs['name']
+ if 'start_time' not in kwargs:
+ kwargs['start_time'] = dataset.attrs['start_time']
+ if 'sensor' not in kwargs:
+ kwargs['sensor'] = dataset.attrs['sensor']
+    # The sensor attribute may be a set, but MITIFF can only handle a single sensor.
+    # Assume the first value of the set is the sensor.
+ if isinstance(kwargs['sensor'], set):
+ LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor'])
+ kwargs['sensor'] = (list(kwargs['sensor']))[0]
+
+
class MITIFFWriter(ImageWriter):
"""Writer to produce MITIFF image files."""
@@ -62,24 +76,11 @@ def save_dataset(self, dataset, filename=None, fill_value=None,
"""Save single dataset as mitiff file."""
LOG.debug("Starting in mitiff save_dataset ... ")
- def _delayed_create(create_opts, dataset):
+ def _delayed_create(dataset):
try:
if 'palette' in kwargs:
self.palette = kwargs['palette']
- if 'platform_name' not in kwargs:
- kwargs['platform_name'] = dataset.attrs['platform_name']
- if 'name' not in kwargs:
- kwargs['name'] = dataset.attrs['name']
- if 'start_time' not in kwargs:
- kwargs['start_time'] = dataset.attrs['start_time']
- if 'sensor' not in kwargs:
- kwargs['sensor'] = dataset.attrs['sensor']
-
- # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor
- # Assume the first value of set as the sensor.
- if isinstance(kwargs['sensor'], set):
- LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor'])
- kwargs['sensor'] = (list(kwargs['sensor']))[0]
+ _adjust_kwargs(dataset, kwargs)
try:
self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config']
@@ -106,8 +107,7 @@ def _delayed_create(create_opts, dataset):
except (KeyError, ValueError, RuntimeError):
raise
- create_opts = ()
- delayed = dask.delayed(_delayed_create)(create_opts, dataset)
+ delayed = dask.delayed(_delayed_create)(dataset)
if compute:
return delayed.compute()
@@ -118,30 +118,18 @@ def save_datasets(self, datasets, filename=None, fill_value=None,
"""Save all datasets to one or more files."""
LOG.debug("Starting in mitiff save_datasets ... ")
- def _delayed_create(create_opts, datasets):
- LOG.debug("create_opts: %s", create_opts)
+ def _delayed_create(datasets):
+ dataset = datasets[0]
+
try:
- if 'platform_name' not in kwargs:
- kwargs['platform_name'] = datasets[0].attrs['platform_name']
- if 'name' not in kwargs:
- kwargs['name'] = datasets[0].attrs['name']
- if 'start_time' not in kwargs:
- kwargs['start_time'] = datasets[0].attrs['start_time']
- if 'sensor' not in kwargs:
- kwargs['sensor'] = datasets[0].attrs['sensor']
-
- # Sensor attrs could be set. MITIFFs needing to handle sensor can only have one sensor
- # Assume the first value of set as the sensor.
- if isinstance(kwargs['sensor'], set):
- LOG.warning('Sensor is set, will use the first value: %s', kwargs['sensor'])
- kwargs['sensor'] = (list(kwargs['sensor']))[0]
+ _adjust_kwargs(dataset, kwargs)
try:
- self.mitiff_config[kwargs['sensor']] = datasets[0].attrs['metadata_requirements']['config']
- translate = datasets[0].attrs['metadata_requirements']['translate']
+ self.mitiff_config[kwargs['sensor']] = dataset.attrs['metadata_requirements']['config']
+ translate = dataset.attrs['metadata_requirements']['translate']
self.translate_channel_name[kwargs['sensor']] = translate
- self.channel_order[kwargs['sensor']] = datasets[0].attrs['metadata_requirements']['order']
- self.file_pattern = datasets[0].attrs['metadata_requirements']['file_pattern']
+ self.channel_order[kwargs['sensor']] = dataset.attrs['metadata_requirements']['order']
+ self.file_pattern = dataset.attrs['metadata_requirements']['file_pattern']
except KeyError:
# For some mitiff products this info is needed, for others not.
# If needed you should know how to fix this
@@ -150,7 +138,7 @@ def _delayed_create(create_opts, datasets):
image_description = self._make_image_description(datasets, **kwargs)
LOG.debug("File pattern %s", self.file_pattern)
if isinstance(datasets, list):
- kwargs['start_time'] = datasets[0].attrs['start_time']
+ kwargs['start_time'] = dataset.attrs['start_time']
else:
kwargs['start_time'] = datasets.attrs['start_time']
gen_filename = filename or self.get_filename(**kwargs)
@@ -159,8 +147,7 @@ def _delayed_create(create_opts, datasets):
except (KeyError, ValueError, RuntimeError):
raise
- create_opts = ()
- delayed = dask.delayed(_delayed_create)(create_opts, datasets)
+ delayed = dask.delayed(_delayed_create)(datasets)
LOG.debug("About to call delayed compute ...")
if compute:
return delayed.compute()
@@ -170,18 +157,7 @@ def _make_channel_list(self, datasets, **kwargs):
channels = []
try:
if self.channel_order:
- for cn in self.channel_order[kwargs['sensor']]:
- for ch, ds in enumerate(datasets):
- if isinstance(ds.attrs['prerequisites'][ch], (DataQuery, DataID)):
- if ds.attrs['prerequisites'][ch]['name'] == cn:
- channels.append(
- ds.attrs['prerequisites'][ch]['name'])
- break
- else:
- if ds.attrs['prerequisites'][ch] == cn:
- channels.append(
- ds.attrs['prerequisites'][ch])
- break
+ channels = self._reorder_channels(datasets, **kwargs)
elif self.palette:
if 'palette_channel_name' in kwargs:
channels.append(kwargs['palette_channel_name'].upper())
@@ -195,6 +171,22 @@ def _make_channel_list(self, datasets, **kwargs):
channels.append(ch + 1)
return channels
+ def _reorder_channels(self, datasets, **kwargs):
+ channels = []
+ for cn in self.channel_order[kwargs['sensor']]:
+ for ch, ds in enumerate(datasets):
+ if isinstance(ds.attrs['prerequisites'][ch], (DataQuery, DataID)):
+ if ds.attrs['prerequisites'][ch]['name'] == cn:
+ channels.append(
+ ds.attrs['prerequisites'][ch]['name'])
+ break
+ else:
+ if ds.attrs['prerequisites'][ch] == cn:
+ channels.append(
+ ds.attrs['prerequisites'][ch])
+ break
+ return channels
+
def _channel_names(self, channels, cns, **kwargs):
_image_description = ""
for ch in channels:
@@ -243,6 +235,50 @@ def _add_proj4_string(self, datasets, first_dataset):
x_0 = 0
y_0 = 0
# FUTURE: Use pyproj 2.0+ to convert EPSG to PROJ4 if possible
+ proj4_string, x_0 = self._convert_epsg_to_proj(proj4_string, x_0)
+
+ if 'geos' in proj4_string:
+ proj4_string = proj4_string.replace("+sweep=x ", "")
+ if '+a=6378137.0 +b=6356752.31414' in proj4_string:
+ proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414",
+ "+ellps=WGS84")
+ if '+units=m' in proj4_string:
+ proj4_string = proj4_string.replace("+units=m", "+units=km")
+
+ if not any(datum in proj4_string for datum in ['datum', 'towgs84']):
+ proj4_string += ' +towgs84=0,0,0'
+
+ if 'units' not in proj4_string:
+ proj4_string += ' +units=km'
+
+ proj4_string = self._append_projection_center(proj4_string, datasets, first_dataset, x_0, y_0)
+ LOG.debug("proj4_string: %s", proj4_string)
+ proj4_string += '\n'
+
+ return proj4_string
+
+ def _append_projection_center(self, proj4_string, datasets, first_dataset, x_0, y_0):
+ if isinstance(datasets, list):
+ dataset = first_dataset
+ else:
+ dataset = datasets
+ if 'x_0' not in proj4_string:
+ proj4_string += ' +x_0=%.6f' % (
+ (-dataset.attrs['area'].area_extent[0] +
+ dataset.attrs['area'].pixel_size_x) + x_0)
+ proj4_string += ' +y_0=%.6f' % (
+ (-dataset.attrs['area'].area_extent[1] +
+ dataset.attrs['area'].pixel_size_y) + y_0)
+ elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string:
+ proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % (
+ (-dataset.attrs['area'].area_extent[0] +
+ dataset.attrs['area'].pixel_size_x) + x_0))
+ proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % (
+ (-dataset.attrs['area'].area_extent[1] +
+ dataset.attrs['area'].pixel_size_y) + y_0))
+ return proj4_string
+
+ def _convert_epsg_to_proj(self, proj4_string, x_0):
if 'EPSG:32631' in proj4_string:
proj4_string = proj4_string.replace("+init=EPSG:32631",
"+proj=etmerc +lat_0=0 +lon_0=3 +k=0.9996 +ellps=WGS84 +datum=WGS84")
@@ -265,53 +301,7 @@ def _add_proj4_string(self, datasets, first_dataset):
x_0 = 500000
elif 'EPSG' in proj4_string:
LOG.warning("EPSG used in proj string but not converted. Please add this in code")
-
- if 'geos' in proj4_string:
- proj4_string = proj4_string.replace("+sweep=x ", "")
- if '+a=6378137.0 +b=6356752.31414' in proj4_string:
- proj4_string = proj4_string.replace("+a=6378137.0 +b=6356752.31414",
- "+ellps=WGS84")
- if '+units=m' in proj4_string:
- proj4_string = proj4_string.replace("+units=m", "+units=km")
-
- if not any(datum in proj4_string for datum in ['datum', 'towgs84']):
- proj4_string += ' +towgs84=0,0,0'
-
- if 'units' not in proj4_string:
- proj4_string += ' +units=km'
-
- if 'x_0' not in proj4_string and isinstance(datasets, list):
- proj4_string += ' +x_0=%.6f' % (
- (-first_dataset.attrs['area'].area_extent[0] +
- first_dataset.attrs['area'].pixel_size_x) + x_0)
- proj4_string += ' +y_0=%.6f' % (
- (-first_dataset.attrs['area'].area_extent[1] +
- first_dataset.attrs['area'].pixel_size_y) + y_0)
- elif 'x_0' not in proj4_string:
- proj4_string += ' +x_0=%.6f' % (
- (-datasets.attrs['area'].area_extent[0] +
- datasets.attrs['area'].pixel_size_x) + x_0)
- proj4_string += ' +y_0=%.6f' % (
- (-datasets.attrs['area'].area_extent[1] +
- datasets.attrs['area'].pixel_size_y) + y_0)
- elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string and isinstance(datasets, list):
- proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % (
- (-first_dataset.attrs['area'].area_extent[0] +
- first_dataset.attrs['area'].pixel_size_x) + x_0))
- proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % (
- (-first_dataset.attrs['area'].area_extent[1] +
- first_dataset.attrs['area'].pixel_size_y) + y_0))
- elif '+x_0=0' in proj4_string and '+y_0=0' in proj4_string:
- proj4_string = proj4_string.replace("+x_0=0", '+x_0=%.6f' % (
- (-datasets.attrs['area'].area_extent[0] +
- datasets.attrs['area'].pixel_size_x) + x_0))
- proj4_string = proj4_string.replace("+y_0=0", '+y_0=%.6f' % (
- (-datasets.attrs['area'].area_extent[1] +
- datasets.attrs['area'].pixel_size_y) + y_0))
- LOG.debug("proj4_string: %s", proj4_string)
- proj4_string += '\n'
-
- return proj4_string
+ return proj4_string, x_0
def _add_pixel_sizes(self, datasets, first_dataset):
_image_description = ""
@@ -520,15 +510,7 @@ def _make_image_description(self, datasets, **kwargs):
LOG.debug("Datasets is a list of dataset")
first_dataset = datasets[0]
- if 'platform_name' in first_dataset.attrs:
- _platform_name = translate_platform_name.get(
- first_dataset.attrs['platform_name'],
- first_dataset.attrs['platform_name'])
- elif 'platform_name' in kwargs:
- _platform_name = translate_platform_name.get(
- kwargs['platform_name'], kwargs['platform_name'])
- else:
- _platform_name = None
+ _platform_name = self._get_platform_name(first_dataset, translate_platform_name, kwargs)
_image_description = ''
_image_description.encode('utf-8')
@@ -557,16 +539,7 @@ def _make_image_description(self, datasets, **kwargs):
_image_description += ' Channels: '
- if isinstance(datasets, list):
- LOG.debug("len datasets: %s", len(datasets))
- _image_description += str(len(datasets))
- elif 'bands' in datasets.sizes:
- LOG.debug("len datasets: %s", datasets.sizes['bands'])
- _image_description += str(datasets.sizes['bands'])
- elif len(datasets.sizes) == 2:
- LOG.debug("len datasets: 1")
- _image_description += '1'
-
+ _image_description += self._get_dataset_len(datasets)
_image_description += ' In this file: '
channels = self._make_channel_list(datasets, **kwargs)
@@ -608,6 +581,32 @@ def _make_image_description(self, datasets, **kwargs):
return _image_description
+ def _get_dataset_len(self, datasets):
+ if isinstance(datasets, list):
+ LOG.debug("len datasets: %s", len(datasets))
+ dataset_len = str(len(datasets))
+ elif 'bands' in datasets.sizes:
+ LOG.debug("len datasets: %s", datasets.sizes['bands'])
+ dataset_len = str(datasets.sizes['bands'])
+ elif len(datasets.sizes) == 2:
+ LOG.debug("len datasets: 1")
+ dataset_len = '1'
+ else:
+ dataset_len = ""
+ return dataset_len
+
+ def _get_platform_name(self, first_dataset, translate_platform_name, kwargs):
+ if 'platform_name' in first_dataset.attrs:
+ _platform_name = translate_platform_name.get(
+ first_dataset.attrs['platform_name'],
+ first_dataset.attrs['platform_name'])
+ elif 'platform_name' in kwargs:
+ _platform_name = translate_platform_name.get(
+ kwargs['platform_name'], kwargs['platform_name'])
+ else:
+ _platform_name = None
+ return _platform_name
+
def _calibrate_data(self, dataset, calibration, min_val, max_val):
reverse_offset = 0.
reverse_scale = 1.
@@ -692,34 +691,7 @@ def _save_datasets_as_mitiff(self, datasets, image_description,
tif.write_image(data.astype(np.uint8), compression='deflate')
break
elif 'dataset' in datasets.attrs['name']:
- LOG.debug("Saving %s as a dataset.", datasets.attrs['name'])
- if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)):
- # Special case with only one channel ie. no bands
-
- # Need to possible translate channels names from satpy to mitiff
- # Note the last index is a tuple index.
- cn = cns.get(datasets.attrs['prerequisites'][0]['name'],
- datasets.attrs['prerequisites'][0]['name'])
- data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'),
- self.mitiff_config[kwargs['sensor']][cn]['min-val'],
- self.mitiff_config[kwargs['sensor']][cn]['max-val'])
-
- tif.write_image(data.astype(np.uint8), compression='deflate')
- else:
- for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]):
- for band in datasets['bands']:
- if band == _cn:
- chn = datasets.sel(bands=band)
- # Need to possible translate channels names from satpy to mitiff
- # Note the last index is a tuple index.
- cn = cns.get(chn.attrs['prerequisites'][_cn_i]['name'],
- chn.attrs['prerequisites'][_cn_i]['name'])
- data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i].get('calibration'),
- self.mitiff_config[kwargs['sensor']][cn]['min-val'],
- self.mitiff_config[kwargs['sensor']][cn]['max-val'])
-
- tif.write_image(data.astype(np.uint8), compression='deflate')
- break
+ self._save_single_dataset(datasets, cns, tif, kwargs)
elif self.palette:
LOG.debug("Saving dataset as palette.")
self._save_as_palette(tif, datasets, **kwargs)
@@ -727,3 +699,33 @@ def _save_datasets_as_mitiff(self, datasets, image_description,
LOG.debug("Saving datasets as enhanced image")
self._save_as_enhanced(tif, datasets, **kwargs)
del tif
+
+ def _save_single_dataset(self, datasets, cns, tif, kwargs):
+ LOG.debug("Saving %s as a dataset.", datasets.attrs['name'])
+ if len(datasets.dims) == 2 and (all('bands' not in i for i in datasets.dims)):
+            # Special case with only one channel, i.e. no bands
+
+            # May need to translate channel names from satpy to mitiff.
+            # Note the last index is a tuple index.
+ cn = cns.get(datasets.attrs['prerequisites'][0]['name'],
+ datasets.attrs['prerequisites'][0]['name'])
+ data = self._calibrate_data(datasets, datasets.attrs['prerequisites'][0].get('calibration'),
+ self.mitiff_config[kwargs['sensor']][cn]['min-val'],
+ self.mitiff_config[kwargs['sensor']][cn]['max-val'])
+
+ tif.write_image(data.astype(np.uint8), compression='deflate')
+ else:
+ for _cn_i, _cn in enumerate(self.channel_order[kwargs['sensor']]):
+ for band in datasets['bands']:
+ if band == _cn:
+ chn = datasets.sel(bands=band)
+                        # May need to translate channel names from satpy to mitiff.
+                        # Note the last index is a tuple index.
+ cn = cns.get(chn.attrs['prerequisites'][_cn_i]['name'],
+ chn.attrs['prerequisites'][_cn_i]['name'])
+ data = self._calibrate_data(chn, chn.attrs['prerequisites'][_cn_i].get('calibration'),
+ self.mitiff_config[kwargs['sensor']][cn]['min-val'],
+ self.mitiff_config[kwargs['sensor']][cn]['max-val'])
+
+ tif.write_image(data.astype(np.uint8), compression='deflate')
+ break
diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py
new file mode 100644
index 0000000000..f1b663ee93
--- /dev/null
+++ b/satpy/writers/ninjogeotiff.py
@@ -0,0 +1,505 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2021 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Writer for GeoTIFF images with tags for the NinJo visualization tool.
+
+The next version of NinJo (release expected spring 2022) will be able
+to read standard GeoTIFF images, with required metadata encoded as a set
+of XML tags in the GDALMetadata TIFF tag. Each of the XML tags must be
+prepended with ``'NINJO_'``. For NinJo delivery, these GeoTIFF files
+supersede the old NinJoTIFF format. The :class:`NinJoGeoTIFFWriter`
+therefore supersedes the old Satpy NinJoTIFF writer and the pyninjotiff
+package.
+
+The reference documentation for valid NinJo tags and their meaning is
+contained in `NinJoPedia`_. Since this page is not in the public web,
+there is a (possibly outdated) `mirror`_.
+
+.. _NinJoPedia: https://ninjopedia.com/tiki-index.php?page=adm_SatelliteServer_SatelliteImportFormats_en
+.. _mirror: https://www.ssec.wisc.edu/~davidh/polar2grid/misc/NinJo_Satellite_Import_Formats.html
+
+There are some user-facing differences between the old NinJoTIFF writer
+and the new NinJoGeoTIFF writer. Most notably, keyword arguments that
+correspond to tags directly passed by the user are now identical,
+including case, to how they will be written to the GDALMetaData and
+interpreted by NinJo. That means some keyword arguments have changed,
+as summarised in the following table:
+
+.. list-table:: Migrating to NinJoGeoTIFF, keyword arguments for the writer
+ :header-rows: 1
+
+ * - ninjotiff (old)
+ - ninjogeotiff (new)
+ - Notes
+ * - ``chan_id``
+ - ``ChannelID``
+ - mandatory
+ * - ``data_cat``
+ - ``DataType``
+ - mandatory
+ * - ``physic_unit``
+ - ``PhysicUnit``
+ - mandatory
+ * - ``physic_val``
+ - ``PhysicValue``
+ - mandatory
+ * - ``sat_id``
+ - ``SatelliteNameID``
+ - mandatory
+ * - ``data_source``
+ - ``DataSource``
+ - optional
+
+Moreover, two keyword arguments are no longer supported because
+their functionality has become redundant. This applies to
+``ch_min_measurement_unit`` and ``ch_max_measurement_unit``.
+Instead, pass those values in source units to the
+:func:`~satpy.enhancements.stretch` enhancement with the ``min_stretch``
+and ``max_stretch`` arguments.
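+
+A minimal example call (the tag values below are placeholders; valid values must
+match the configuration of the receiving NinJo installation)::
+
+    scn.save_datasets(
+        writer="ninjogeotiff",
+        ChannelID=900015,
+        DataType=1,
+        PhysicUnit="C",
+        PhysicValue="Temperature",
+        SatelliteNameID=6400014)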
+"""
+
+import copy
+import datetime
+import logging
+
+import numpy as np
+
+from .geotiff import GeoTIFFWriter
+
+logger = logging.getLogger(__name__)
+
+
+class NinJoGeoTIFFWriter(GeoTIFFWriter):
+ """Writer for GeoTIFFs with NinJo tags.
+
+ This writer is experimental. API may be subject to change.
+
+ For information, see module docstring and documentation for
+ :meth:`~NinJoGeoTIFFWriter.save_image`.
+ """
+
+ def save_image(
+ self, image, filename=None, fill_value=None,
+ compute=True, keep_palette=False, cmap=None, overviews=None,
+ overviews_minsize=256, overviews_resampling=None,
+ tags=None, config_files=None,
+ *, ChannelID, DataType, PhysicUnit, PhysicValue,
+ SatelliteNameID, **kwargs):
+ """Save image along with NinJo tags.
+
+ Save image along with NinJo tags. Interface as for GeoTIFF,
+ except NinJo expects some additional tags. Those tags will be
+ prepended with ``ninjo_`` and added as GDALMetaData.
+
+ Writing such images requires trollimage 1.16 or newer.
+
+ Importing such images with NinJo requires NinJo 7 or newer.
+
+ Args:
+ image (:class:`~trollimage.xrimage.XRImage`):
+ Image to save.
+ filename (str): Where to save the file.
+ fill_value (int): Which pixel value is fill value?
+            compute (bool): Compute and save the image immediately, or return a delayed result for later computation.
+ keep_palette (bool):
+ As for parent GeoTIFF :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`.
+ cmap (:class:`trollimage.colormap.Colormap`):
+ As for parent :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`.
+ overviews (list):
+ As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`.
+ overviews_minsize (int):
+ As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`.
+ overviews_resampling (str):
+ As for :meth:`~satpy.writers.geotiff.GeoTIFFWriter.save_image`.
+ tags (dict): Extra (not NinJo) tags to add to GDAL MetaData
+ config_files (Any): Not directly used by this writer, supported
+ for compatibility with other writers.
+
+ Remaining keyword arguments are either passed as GDAL options,
+ if contained in ``self.GDAL_OPTIONS``, or they are passed
+ to :class:`NinJoTagGenerator`, which will include them as
+ NinJo tags in GDALMetadata. Supported tags are defined in
+ ``NinJoTagGenerator.optional_tags``. The meaning of those (and
+ other) tags are defined in the NinJo documentation (see module
+ documentation for a link to NinJoPedia). The following tags
+ are mandatory and must be provided as keyword arguments:
+
+ ChannelID (int)
+ NinJo Channel ID
+ DataType (int)
+ NinJo Data Type
+ SatelliteNameID (int)
+ NinJo Satellite ID
+ PhysicUnit (str)
+ NinJo label for unit (example: "C"). If PhysicValue is set to
+ "Temperature", PhysicUnit is set to "C", but data attributes
+ incidate the data have unit "K", then the writer will adapt the
+                indicate the data have unit "K", then the writer will adapt the
+ in units of "C".
+ PhysicValue (str)
+ NinJo label for quantity (example: "temperature")
+
+ """
+ dataset = image.data
+
+ # filename not passed on to writer by Scene.save_dataset, but I need
+ # it!
+ filename = filename or self.get_filename(**dataset.attrs)
+
+ gdal_opts = {}
+ ntg_opts = {}
+ for (k, v) in kwargs.items():
+ if k in self.GDAL_OPTIONS:
+ gdal_opts[k] = v
+ else:
+ ntg_opts[k] = v
+
+ ntg = NinJoTagGenerator(
+ image,
+ fill_value=fill_value,
+ filename=filename,
+ ChannelID=ChannelID,
+ DataType=DataType,
+ PhysicUnit=PhysicUnit,
+ PhysicValue=PhysicValue,
+ SatelliteNameID=SatelliteNameID,
+ **ntg_opts)
+ ninjo_tags = {f"ninjo_{k:s}": v for (k, v) in ntg.get_all_tags().items()}
+ image = self._fix_units(image, PhysicValue, PhysicUnit)
+
+ return super().save_image(
+ image,
+ filename=filename,
+ fill_value=fill_value,
+ compute=compute,
+ keep_palette=keep_palette,
+ cmap=cmap,
+ overviews=overviews,
+ overviews_minsize=overviews_minsize,
+ overviews_resampling=overviews_resampling,
+ tags={**(tags or {}), **ninjo_tags},
+ scale_offset_tags=None if image.mode.startswith("RGB") else ("ninjo_Gradient", "ninjo_AxisIntercept"),
+ **gdal_opts)
+
+ def _fix_units(self, image, quantity, unit):
+ """Adapt units between °C and K.
+
+ This will return a new XRImage, to make sure the old data and
+ enhancement history aren't touched.
+ """
+ data_units = image.data.attrs.get("units")
+ if (quantity.lower() == "temperature" and
+ unit == "C" and
+ data_units == "K"):
+ logger.debug("Adding offset for K → °C conversion")
+ new_attrs = copy.deepcopy(image.data.attrs)
+ im2 = type(image)(image.data.copy())
+ im2.data.attrs = new_attrs
+ # this scale/offset has to be applied before anything else
+ im2.data.attrs["enhancement_history"].insert(0, {"scale": 1, "offset": 273.15})
+ return im2
+ if unit != data_units and unit.lower() != "n/a":
+ logger.warning(
+ f"Writing {unit!s} to ninjogeotiff headers, but "
+ f"data attributes have unit {data_units!s}. "
+ "No conversion applied.")
+
+ return image
+
+
+class NinJoTagGenerator:
+ """Class to collect NinJo tags.
+
+ This class is used by :class:`NinJoGeoTIFFWriter` to collect NinJo tags.
+ Most end-users will not need to create instances of this class directly.
+
+ Tags are gathered from three sources:
+
+ - Fixed tags, contained in the attribute ``fixed_tags``. The value of
+ those tags is hardcoded and never changes.
+ - Tags passed by the user, contained in the attribute ``passed_tags``.
+ Those tags must be passed by the user as arguments to the writer, which
+ will pass them on when instantiating this class.
+ - Tags calculated from data and metadata. Those tags are defined in the
+ attribute ``dynamic_tags``. They are either calculated from image data,
+ from image metadata, or from arguments passed by the user to the writer.
+
+ Some tags are mandatory (defined in ``mandatory_tags``). All tags that are
+ not mandatory are optional. By default, optional tags are generated if and
+ only if the required information is available.
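+
+    A sketch of direct use (normally only the writer instantiates this class;
+    the tag values below are placeholders)::
+
+        ntg = NinJoTagGenerator(
+            image, fill_value=255, filename="out.tif",
+            ChannelID=900015, DataType=1, PhysicUnit="C",
+            PhysicValue="Temperature", SatelliteNameID=6400014)
+        tags = ntg.get_all_tags()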
+ """
+
+ # tags that never change
+ fixed_tags = {
+ "Magic": "NINJO",
+ "HeaderVersion": 2,
+ "XMinimum": 1,
+ "YMinimum": 1}
+
+ # tags that must be passed directly by the user
+ passed_tags = {"ChannelID", "DataType", "PhysicUnit",
+ "SatelliteNameID", "PhysicValue"}
+
+ # tags that can be calculated dynamically from (meta)data
+ dynamic_tags = {
+ "CentralMeridian": "central_meridian",
+ "ColorDepth": "color_depth",
+ "CreationDateID": "creation_date_id",
+ "DateID": "date_id",
+ "EarthRadiusLarge": "earth_radius_large",
+ "EarthRadiusSmall": "earth_radius_small",
+ "FileName": "filename",
+ "MaxGrayValue": "max_gray_value",
+ "MinGrayValue": "min_gray_value",
+ "Projection": "projection",
+ "ReferenceLatitude1": "ref_lat_1",
+ "TransparentPixel": "transparent_pixel",
+ "XMaximum": "xmaximum",
+ "YMaximum": "ymaximum"
+ }
+
+ # mandatory tags according to documentation
+ mandatory_tags = {"SatelliteNameID", "DateID", "CreationDateID",
+ "ChannelID", "HeaderVersion", "DataType",
+ "SatelliteNumber", "ColorDepth", "XMinimum", "XMaximum",
+ "YMinimum", "YMaximum", "Projection", "PhysicValue",
+ "PhysicUnit", "MinGrayValue", "MaxGrayValue", "Gradient",
+ "AxisIntercept", "TransparentPixel"}
+
+ # optional tags are added on best effort or if passed by user
+ optional_tags = {"DataSource", "MeridianWest", "MeridianEast",
+ "EarthRadiusLarge", "EarthRadiusSmall", "GeodeticDate",
+ "ReferenceLatitude1", "ReferenceLatitude2",
+ "CentralMeridian", "ColorTable", "Description",
+ "OverflightDirection", "GeoLatitude", "GeoLongitude",
+ "Altitude", "AOSAzimuth", "LOSAzimuth", "MaxElevation",
+ "OverFlightTime", "IsBlackLinesCorrection",
+ "IsAtmosphereCorrected", "IsCalibrated", "IsNormalized",
+ "OriginalHeader", "IsValueTableAvailable",
+ "ValueTableFloatField"}
+
+ # tags that are added later in other ways
+ postponed_tags = {"AxisIntercept", "Gradient"}
+
+ def __init__(self, image, fill_value, filename, **kwargs):
+ """Initialise tag generator.
+
+ Args:
+ image (:class:`trollimage.xrimage.XRImage`): XRImage for which
+ NinJo tags should be calculated.
+ fill_value (int): Fill value corresponding to image.
+ filename (str): Filename to be written.
+ **kwargs: Any additional tags to be included as-is.
+ """
+ self.image = image
+ self.dataset = image.data
+ self.fill_value = fill_value
+ self.filename = filename
+ self.args = kwargs
+ self.tag_names = (self.fixed_tags.keys() |
+ self.passed_tags |
+ self.dynamic_tags.keys() |
+ (self.args.keys() & self.optional_tags))
+ if self.args.keys() - self.tag_names:
+ raise ValueError("The following tags were not recognised: " +
+ " ".join(self.args.keys() - self.tag_names))
+
+ def get_all_tags(self):
+ """Get a dictionary with all tags for NinJo."""
+ tags = {}
+ for tag in self.tag_names:
+ try:
+ tags[tag] = self.get_tag(tag)
+ except (AttributeError, KeyError) as e:
+ if tag in self.mandatory_tags:
+ raise
+ logger.debug(
+ f"Unable to obtain value for optional NinJo tag {tag:s}. "
+ f"This is probably expected. The reason is: {e.args[0]}")
+ return tags
+
+ def get_tag(self, tag):
+ """Get value for NinJo tag."""
+ if tag in self.fixed_tags:
+ return self.fixed_tags[tag]
+ if tag in self.passed_tags:
+ return self.args[tag]
+ if tag in self.dynamic_tags:
+ return getattr(self, f"get_{self.dynamic_tags[tag]:s}")()
+ if tag in self.optional_tags and tag in self.args:
+ return self.args[tag]
+ if tag in self.postponed_tags:
+ raise ValueError(f"Tag {tag!s} is added later by the GeoTIFF writer.")
+ if tag in self.optional_tags:
+ raise ValueError(
+ f"Optional tag {tag!s} must be supplied by user if user wants to "
+ "request the value, but wasn't.")
+ raise ValueError(f"Unknown tag: {tag!s}")
+
+ def get_central_meridian(self):
+ """Calculate central meridian."""
+ pams = self.dataset.attrs["area"].crs.coordinate_operation.params
+ lon_0 = {p.name: p.value for p in pams}["Longitude of natural origin"]
+ return lon_0
+
+ def get_color_depth(self):
+ """Return the color depth."""
+ if self.image.mode in ("L", "P"):
+ return 8
+ if self.image.mode in ("LA", "PA"):
+ return 16
+ if self.image.mode == "RGB":
+ return 24
+ if self.image.mode == "RGBA":
+ return 32
+ raise ValueError(
+ f"Unsupported image mode: {self.image.mode:s}")
+
+ # Set unix epoch here explicitly, because datetime.timestamp() is
+ # apparently not supported on Windows.
+ _epoch = datetime.datetime(1970, 1, 1, tzinfo=datetime.timezone.utc)
+
+ def get_creation_date_id(self):
+ """Calculate the creation date ID.
+
+ That's seconds since UNIX Epoch for the time the image is created.
+ """
+ delta = datetime.datetime.now(tz=datetime.timezone.utc) - self._epoch
+ return int(delta.total_seconds())
+
+ def get_date_id(self):
+ """Calculate the date ID.
+
+ That's seconds since UNIX Epoch for the time corresponding to the
+ satellite image start of measurement time.
+ """
+ tm = self.dataset.attrs["start_time"]
+ delta = tm.replace(tzinfo=datetime.timezone.utc) - self._epoch
+ return int(delta.total_seconds())
+
+ def get_earth_radius_large(self):
+ """Return the Earth semi-major axis in metre."""
+ return self.dataset.attrs["area"].crs.ellipsoid.semi_major_metre
+
+ def get_earth_radius_small(self):
+ """Return the Earth semi-minor axis in metre."""
+ return self.dataset.attrs["area"].crs.ellipsoid.semi_minor_metre
+
+ def get_filename(self):
+ """Return the filename."""
+ return self.filename
+
+ def get_min_gray_value(self):
+ """Calculate minimum gray value."""
+ return self.image._scale_to_dtype(
+ self.dataset.min(),
+ np.uint8,
+ self.fill_value).astype(np.uint8)
+
+ def get_max_gray_value(self):
+ """Calculate maximum gray value."""
+ return self.image._scale_to_dtype(
+ self.dataset.max(),
+ np.uint8,
+ self.fill_value).astype(np.uint8)
+
+ def get_projection(self):
+ """Get NinJo projection string.
+
+ From the documentation, valid values are:
+
+    - NPOL/SPOL: polar-stereographic North/South
+ - PLAT: „Plate Carrée“, equirectangular projection
+ - MERC: Mercator projection
+
+ Derived from AreaDefinition.
+ """
+ if self.dataset.attrs["area"].crs.coordinate_system.name == "ellipsoidal":
+ # For lat/lon coordinates, we say it's PLAT
+ return "PLAT"
+ name = self.dataset.attrs["area"].crs.coordinate_operation.method_name
+ if "Equidistant Cylindrical" in name:
+ return "PLAT"
+ if "Mercator" in name:
+ return "MERC"
+ if "Stereographic" in name:
+ if self.get_ref_lat_1() >= 0:
+ return "NPOL"
+ return "SPOL"
+ raise ValueError(
+ "Unknown mapping from area "
+ f"'{self.dataset.attrs['area'].description}' with CRS coordinate "
+ f"operation name {name:s} to NinJo projection. NinJo understands only "
+            "equidistant cylindrical, Mercator, or stereographic projections.")
+
+ def get_ref_lat_1(self):
+ """Get reference latitude one.
+
+ Derived from area definition.
+ """
+ pams = {p.name: p.value for p in self.dataset.attrs["area"].crs.coordinate_operation.params}
+ for label in ["Latitude of standard parallel",
+ "Latitude of natural origin",
+ "Latitude of 1st standard parallel"]:
+ if label in pams:
+ return pams[label]
+ raise ValueError(
+ "Could not find reference latitude for area "
+ f"{self.dataset.attrs['area'].description}")
+
+ def get_transparent_pixel(self):
+ """Get the transparent pixel value, also known as the fill value.
+
+        When no fill value is defined (value `None`), such as for RGBA or
+ LA images, returns -1, in accordance with the file format
+ specification.
+ """
+ if self.fill_value is None:
+ return -1
+ return self.fill_value
+
+ def get_xmaximum(self):
+        """Get the maximum value of x, i.e. the zonal extent of the image in pixels."""
+ return self.dataset.sizes["x"]
+
+ def get_ymaximum(self):
+        """Get the maximum value of y, i.e. the meridional extent of the image in pixels."""
+ return self.dataset.sizes["y"]
+
+ def get_meridian_east(self):
+ """Get the easternmost longitude of the area.
+
+ Currently not implemented. In pyninjotiff it was implemented but the
+ answer was incorrect.
+ """
+ raise NotImplementedError()
+
+ def get_meridian_west(self):
+ """Get the westernmost longitude of the area.
+
+ Currently not implemented. In pyninjotiff it was implemented but the
+ answer was incorrect.
+ """
+ raise NotImplementedError()
+
+ def get_ref_lat_2(self):
+ """Get reference latitude two.
+
+ This is not implemented and never was correctly implemented in
+ pyninjotiff either. It doesn't appear to be used by NinJo.
+ """
+ raise NotImplementedError("Second reference latitude not implemented.")
diff --git a/satpy/writers/ninjotiff.py b/satpy/writers/ninjotiff.py
index 54756d8340..557b2749d0 100644
--- a/satpy/writers/ninjotiff.py
+++ b/satpy/writers/ninjotiff.py
@@ -80,12 +80,11 @@
import logging
import numpy as np
-import xarray as xr
-
import pyninjotiff.ninjotiff as nt
-from satpy.writers import ImageWriter
+import xarray as xr
from trollimage.xrimage import invert_scale_offset
+from satpy.writers import ImageWriter
logger = logging.getLogger(__name__)
@@ -117,8 +116,9 @@ def convert_units(dataset, in_unit, out_unit):
return dataset
if in_unit.lower() in {"k", "kelvin"} and out_unit.lower() in {"c", "celsius"}:
+ logger.debug("Converting temperature units from K to °C")
with xr.set_options(keep_attrs=True):
- new_dataset = dataset + 273.15
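+ # subtracting 273.15 converts kelvin to degrees Celsius
+ # (e.g. 300.15 K -> 27.0 °C); keep_attrs above preserves the attrs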
+ new_dataset = dataset - 273.15
new_dataset.attrs["units"] = out_unit
return new_dataset
@@ -173,15 +173,19 @@ def save_image(self, img, filename=None, compute=True, **kwargs): # floating_po
raise NotImplementedError(
"Don't know how to handle non-scale/offset-based enhancements yet."
)
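+ # palette (P-mode) images carry integer palette indices; cast them
+ # to uint8 before handing the image to pyninjotiff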
+ if img.mode.startswith("P"):
+ img.data = img.data.astype(np.uint8)
return nt.save(img, filename, data_is_scaled_01=True, compute=compute, **kwargs)
def save_dataset(
- self, dataset, filename=None, fill_value=None, compute=True, **kwargs
+ self, dataset, filename=None, fill_value=None, compute=True,
+ convert_temperature_units=True, **kwargs
):
"""Save a dataset to ninjotiff format.
This calls `save_image` in turn, but first performs some unit conversion
- if necessary.
+ if necessary and desired. Unit conversion can be suppressed by passing
+ ``convert_temperature_units=False``.
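+
+ Usage sketch (dataset name and ``ninjo_tags`` are illustrative; extra
+ keyword arguments are forwarded to pyninjotiff)::
+
+ scn.save_dataset("IR_108", writer="ninjotiff", physic_unit="C",
+ convert_temperature_units=False, **ninjo_tags)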
"""
nunits = kwargs.get("physic_unit", None)
if nunits is None:
@@ -200,7 +204,10 @@ def save_dataset(
"Saving to physical ninjo file without units defined in dataset!"
)
else:
- dataset = convert_units(dataset, units, nunits)
+ if convert_temperature_units:
+ dataset = convert_units(dataset, units, nunits)
+ else:
+ logger.debug("Omitting unit conversion")
return super(NinjoTIFFWriter, self).save_dataset(
dataset, filename=filename, compute=compute, fill_value=fill_value, **kwargs
)
diff --git a/setup.cfg b/setup.cfg
index f56be011f6..e2ef375dc0 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -18,6 +18,10 @@ exclude =
satpy/readers/scatsat1_l2b.py
satpy/version.py
satpy/tests/features
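+# codes ignored below: F401 = imported but unused, D103 = missing
+# docstring in public function, F811 = redefinition of unused name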
+per-file-ignores =
+ satpy/tests/*/conftest.py:F401
+ doc/source/doi_role.py:D103
+ satpy/tests/features/steps/*.py:F811
[coverage:run]
relative_files = True
diff --git a/setup.py b/setup.py
index df5b783e38..cdddac0058 100644
--- a/setup.py
+++ b/setup.py
@@ -30,13 +30,15 @@
except ImportError:
pass
-requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.11.0', 'trollsift',
- 'trollimage >1.10.1', 'pykdtree', 'pyyaml', 'xarray >=0.10.1, !=0.13.0',
+requires = ['numpy >=1.13', 'pillow', 'pyresample >=1.24.0', 'trollsift',
+ 'trollimage >1.10.1', 'pykdtree', 'pyyaml >=5.1', 'xarray >=0.10.1, !=0.13.0',
'dask[array] >=0.17.1', 'pyproj>=2.2', 'zarr', 'donfig', 'appdirs',
- 'pooch']
+ 'pooch', 'pyorbital']
-test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'libtiff',
- 'rasterio', 'geoviews', 'trollimage', 'fsspec']
+test_requires = ['behave', 'h5py', 'netCDF4', 'pyhdf', 'imageio', 'pylibtiff',
+ 'rasterio', 'geoviews', 'trollimage', 'fsspec', 'bottleneck',
+ 'rioxarray', 'pytest', 'pytest-lazy-fixture', 'defusedxml',
+ 's3fs']
extras_require = {
# Readers:
@@ -52,18 +54,22 @@
'amsr2_l1b': ['h5py >= 2.7.0'],
'hrpt': ['pyorbital >= 1.3.1', 'pygac', 'python-geotiepoints >= 1.1.7'],
'hrit_msg': ['pytroll-schedule'],
- 'msi_safe': ['glymur'],
+ 'msi_safe': ['rioxarray', "bottleneck", "python-geotiepoints"],
'nc_nwcsaf_msg': ['netCDF4 >= 1.1.8'],
- 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray'],
+ 'sar_c': ['python-geotiepoints >= 1.1.7', 'rasterio', 'rioxarray', 'defusedxml'],
'abi_l1b': ['h5netcdf'],
+ 'seviri_l1b_hrit': ['pyorbital >= 1.3.1'],
+ 'seviri_l1b_native': ['pyorbital >= 1.3.1'],
+ 'seviri_l1b_nc': ['pyorbital >= 1.3.1', 'netCDF4 >= 1.1.8'],
'seviri_l2_bufr': ['eccodes-python'],
'seviri_l2_grib': ['eccodes-python'],
'hsaf_grib': ['pygrib'],
+ 'remote_reading': ['fsspec'],
# Writers:
'cf': ['h5netcdf >= 0.7.3'],
'awips_tiled': ['netCDF4 >= 1.1.8'],
'geotiff': ['rasterio', 'trollimage[geotiff]'],
- 'mitiff': ['libtiff'],
+ 'mitiff': ['pylibtiff'],
'ninjo': ['pyninjotiff', 'pint'],
# Composites/Modifiers:
'rayleigh': ['pyspectral >= 0.10.1'],
@@ -75,6 +81,8 @@
# Other
'geoviews': ['geoviews'],
'overlays': ['pycoast', 'pydecorate'],
+ 'satpos_from_tle': ['skyfield', 'astropy'],
+ 'tests': test_requires,
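+ # any extra can be requested at install time, e.g.: pip install "satpy[tests]"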
}
all_extras = []
for extra_deps in extras_require.values():
@@ -147,11 +155,8 @@ def _config_data_files(base_dirs, extensions=(".cfg", )):
'tests/etc/writers/*.yaml',
]},
zip_safe=False,
- use_scm_version={'write_to': 'satpy/version.py'},
- setup_requires=['setuptools_scm', 'setuptools_scm_git_archive'],
install_requires=requires,
- tests_require=test_requires,
- python_requires='>=3.7',
+ python_requires='>=3.8',
extras_require=extras_require,
entry_points=entry_points,
)
diff --git a/utils/convert_to_ninjotiff.py b/utils/convert_to_ninjotiff.py
index 8fb4699af0..e457ee35e3 100644
--- a/utils/convert_to_ninjotiff.py
+++ b/utils/convert_to_ninjotiff.py
@@ -15,9 +15,7 @@
#
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
-"""
-First version of a simple command line too that converts an
-image into a NinJo Tiff file.
+"""Simple command line too that converts an image into a NinJo Tiff file.
NinJo Tiff metadata can be passed as command line input or
through a config file (an example is given in the ninjo-cmd.yaml
@@ -28,19 +26,15 @@
"""
+import argparse
import os
-from satpy.utils import debug_on
-
-from satpy import Scene
-from mpop.projector import get_area_def
-import argparse
import yaml
-try:
- from yaml import UnsafeLoader
-except ImportError:
- from yaml import Loader as UnsafeLoader
+from yaml import UnsafeLoader
+from satpy import Scene
+from satpy.resample import get_area_def
+from satpy.utils import debug_on
debug_on()
@@ -69,7 +63,7 @@
cfg = yaml.load(ymlfile, Loader=UnsafeLoader)
narea = get_area_def(args.areadef)
-global_data = Scene(sensor="images", reader="generic_image", area=narea)
+global_data = Scene(reader="generic_image")
global_data.load(['image'])
global_data['image'].info['area'] = narea
diff --git a/utils/fetch_avhrr_calcoeffs.py b/utils/fetch_avhrr_calcoeffs.py
index b46a693c4b..7bc49ba8db 100644
--- a/utils/fetch_avhrr_calcoeffs.py
+++ b/utils/fetch_avhrr_calcoeffs.py
@@ -16,12 +16,13 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Fetch avhrr calibration coefficients."""
-import urllib2
-import h5py
import datetime as dt
import os.path
import sys
+import h5py
+import urllib2
+
BASE_URL = "http://www.star.nesdis.noaa.gov/smcd/spb/fwu/homepage/" + \
"AVHRR/Op_Cal_AVHRR/"