Skip to content

Commit

Permalink
Merge remote-tracking branch 'pytroll/main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
Dario Stelitano committed Dec 14, 2023
2 parents fb8ff3b + 4076e99 commit 91f96af
Show file tree
Hide file tree
Showing 9 changed files with 58 additions and 204 deletions.
13 changes: 7 additions & 6 deletions satpy/composites/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,7 +204,7 @@ def drop_coordinates(self, data_arrays):
if coord not in ds.dims and
any([neglible in coord for neglible in NEGLIGIBLE_COORDS])]
if drop:
new_arrays.append(ds.drop(drop))
new_arrays.append(ds.drop_vars(drop))
else:
new_arrays.append(ds)

Expand Down Expand Up @@ -992,17 +992,17 @@ def __call__(self, projectables, *args, **kwargs):
hrv = projectables[2]

try:
ch3 = 3 * hrv - vis06 - vis08
ch3 = 3.0 * hrv - vis06 - vis08
ch3.attrs = hrv.attrs
except ValueError:
raise IncompatibleAreas

ndvi = (vis08 - vis06) / (vis08 + vis06)
ndvi = np.where(ndvi < 0, 0, ndvi)
ndvi = ndvi.where(ndvi >= 0.0, 0.0)

ch1 = ndvi * vis06 + (1 - ndvi) * vis08
ch1 = ndvi * vis06 + (1.0 - ndvi) * vis08
ch1.attrs = vis06.attrs
ch2 = ndvi * vis08 + (1 - ndvi) * vis06
ch2 = ndvi * vis08 + (1.0 - ndvi) * vis06
ch2.attrs = vis08.attrs

res = super(RealisticColors, self).__call__((ch1, ch2, ch3),
Expand Down Expand Up @@ -1180,7 +1180,8 @@ def _combined_sharpened_info(self, info, new_attrs):


def _get_sharpening_ratio(high_res, low_res):
ratio = high_res / low_res
with np.errstate(divide="ignore"):
ratio = high_res / low_res
# make ratio a no-op (multiply by 1) where the ratio is NaN, infinity,
# or it is negative.
ratio[~np.isfinite(ratio) | (ratio < 0)] = 1.0
Expand Down
4 changes: 0 additions & 4 deletions satpy/composites/ahi.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,3 @@
# You should have received a copy of the GNU General Public License along with
# satpy. If not, see <http://www.gnu.org/licenses/>.
"""Composite classes for AHI."""

# The green corrector used to be defined here, but was moved to spectral.py
# in Satpy 0.38 because it also applies to FCI.
from .spectral import GreenCorrector # noqa: F401
21 changes: 0 additions & 21 deletions satpy/composites/spectral.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,6 @@
"""Composite classes for spectral adjustments."""

import logging
import warnings

from satpy.composites import GenericCompositor
from satpy.dataset import combine_metadata
Expand Down Expand Up @@ -199,23 +198,3 @@ def _compute_blend_fraction(self, ndvi):
+ self.limits[0]

return fraction


class GreenCorrector(SpectralBlender):
    """Deprecated compositor for blending channels in green-band corrections.

    Kept only for backwards compatibility.  The generic replacement is
    'SpectralBlender', which computes a weighted average over N channels with
    N matching weights/fractions; 'HybridGreen' performs the specific
    correction of green bands centered at 0.51 microns following
    Miller et al. (2016, :doi:`10.1175/BAMS-D-15-00154.2`) to improve
    true color imagery.
    """

    def __init__(self, *args, fractions=(0.85, 0.15), **kwargs):
        """Set default keyword argument values."""
        # Warn at the caller's frame (stacklevel=2) so users see where they
        # instantiated the deprecated class.
        deprecation_msg = (
            "'GreenCorrector' is deprecated, use 'SpectralBlender' instead, or 'HybridGreen' for hybrid green"
            " correction following Miller et al. (2016)."
        )
        warnings.warn(deprecation_msg, UserWarning, stacklevel=2)
        super().__init__(*args, fractions=fractions, **kwargs)
40 changes: 0 additions & 40 deletions satpy/etc/composites/ahi.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,46 +15,6 @@ modifiers:
- solar_zenith_angle

composites:
green:
deprecation_warning: "'green' is a deprecated composite. Use the equivalent 'hybrid_green' instead."
compositor: !!python/name:satpy.composites.spectral.HybridGreen
# FUTURE: Set a wavelength...see what happens. Dependency finding
# probably wouldn't work.
prerequisites:
# should we be using the most corrected or least corrected inputs?
# what happens if something requests more modifiers on top of this?
- wavelength: 0.51
modifiers: [sunz_corrected, rayleigh_corrected]
- wavelength: 0.85
modifiers: [sunz_corrected]
standard_name: toa_bidirectional_reflectance

green_true_color_reproduction:
# JMA True Color Reproduction green band
# http://www.jma.go.jp/jma/jma-eng/satellite/introduction/TCR.html
deprecation_warning: "'green_true_color_reproduction' is a deprecated composite. Use the equivalent 'reproduced_green' instead."
compositor: !!python/name:satpy.composites.spectral.SpectralBlender
fractions: [0.6321, 0.2928, 0.0751]
prerequisites:
- name: B02
modifiers: [sunz_corrected, rayleigh_corrected]
- name: B03
modifiers: [sunz_corrected, rayleigh_corrected]
- name: B04
modifiers: [sunz_corrected]
standard_name: none

green_nocorr:
deprecation_warning: "'green_nocorr' is a deprecated composite. Use the equivalent 'hybrid_green_nocorr' instead."
compositor: !!python/name:satpy.composites.spectral.HybridGreen
# FUTURE: Set a wavelength...see what happens. Dependency finding
# probably wouldn't work.
prerequisites:
# should we be using the most corrected or least corrected inputs?
# what happens if something requests more modifiers on top of this?
- wavelength: 0.51
- wavelength: 0.85
standard_name: toa_reflectance

hybrid_green:
compositor: !!python/name:satpy.composites.spectral.HybridGreen
Expand Down
53 changes: 7 additions & 46 deletions satpy/resample.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@
"bucket_sum", "Sum Bucket Resampling", :class:`~satpy.resample.BucketSum`
"bucket_count", "Count Bucket Resampling", :class:`~satpy.resample.BucketCount`
"bucket_fraction", "Fraction Bucket Resampling", :class:`~satpy.resample.BucketFraction`
"gradient_search", "Gradient Search Resampling", :class:`~pyresample.gradient.GradientSearchResampler`
"gradient_search", "Gradient Search Resampling", :meth:`~pyresample.gradient.create_gradient_search_resampler`
The resampling algorithm used can be specified with the ``resampler`` keyword
argument and defaults to ``nearest``:
Expand Down Expand Up @@ -148,13 +148,11 @@

import dask.array as da
import numpy as np
import pyresample
import xarray as xr
import zarr
from packaging import version
from pyresample.ewa import DaskEWAResampler, LegacyDaskEWAResampler
from pyresample.geometry import SwathDefinition
from pyresample.gradient import GradientSearchResampler
from pyresample.gradient import create_gradient_search_resampler
from pyresample.resampler import BaseResampler as PRBaseResampler

from satpy._config import config_search_paths, get_config_path
Expand All @@ -177,8 +175,6 @@

resamplers_cache: "WeakValueDictionary[tuple, object]" = WeakValueDictionary()

PR_USE_SKIPNA = version.parse(pyresample.__version__) > version.parse("1.17.0")


def hash_dict(the_dict, the_hash=None):
"""Calculate a hash for a dictionary."""
Expand Down Expand Up @@ -773,33 +769,6 @@ def _get_replicated_chunk_sizes(d_arr, repeats):
return tuple(repeated_chunks)


def _get_arg_to_pass_for_skipna_handling(**kwargs):
    """Determine if skipna can be passed to the compute functions for the average and sum bucket resampler.

    Newer Pyresample (> 1.17.0, see ``PR_USE_SKIPNA``) accepts ``skipna``,
    while older versions only understand the deprecated ``mask_all_nan``
    argument.  Translate between the two so callers can always pass
    ``skipna`` and get a warning when the deprecated argument is used.

    Returns:
        dict: The adjusted keyword arguments to forward to Pyresample.
    """
    # FIXME this can be removed once Pyresample 1.18.0 is a Satpy requirement

    if PR_USE_SKIPNA:
        # New Pyresample: drop the deprecated argument and keep ``skipna``.
        if "mask_all_nan" in kwargs:
            warnings.warn(
                "Argument mask_all_nan is deprecated. Please use skipna for missing values handling. "
                "Continuing with default skipna=True, if not provided differently.",
                DeprecationWarning,
                stacklevel=3
            )
            kwargs.pop("mask_all_nan")
    else:
        # Old Pyresample: ``skipna`` is not supported, so fall back to
        # mask_all_nan (defaulting to False) and remove ``skipna`` to avoid
        # an unexpected-keyword error.
        if "mask_all_nan" in kwargs:
            warnings.warn(
                # Space added after "deprecated." — the two literals used to
                # concatenate into "deprecated.Please update".
                "Argument mask_all_nan is deprecated. "
                "Please update Pyresample and use skipna for missing values handling.",
                DeprecationWarning,
                stacklevel=3
            )
        kwargs.setdefault("mask_all_nan", False)
        # Default of None keeps this safe when skipna was never supplied.
        kwargs.pop("skipna", None)

    return kwargs


class BucketResamplerBase(PRBaseResampler):
"""Base class for bucket resampling which implements averaging."""

Expand Down Expand Up @@ -832,11 +801,6 @@ def resample(self, data, **kwargs): # noqa: D417
Returns (xarray.DataArray): Data resampled to the target area
"""
if not PR_USE_SKIPNA and "skipna" in kwargs:
raise ValueError("You are trying to set the skipna argument but you are using an old version of"
" Pyresample that does not support it."
"Please update Pyresample to 1.18.0 or higher to be able to use this argument.")

self.precompute(**kwargs)
attrs = data.attrs.copy()
data_arr = data.data
Expand Down Expand Up @@ -910,17 +874,16 @@ def compute(self, data, fill_value=np.nan, skipna=True, **kwargs): # noqa: D417
Returns:
dask.Array
"""
kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs)

results = []
if data.ndim == 3:
for i in range(data.shape[0]):
res = self.resampler.get_average(data[i, :, :],
fill_value=fill_value,
skipna=skipna,
**kwargs)
results.append(res)
else:
res = self.resampler.get_average(data, fill_value=fill_value,
res = self.resampler.get_average(data, fill_value=fill_value, skipna=skipna,
**kwargs)
results.append(res)

Expand Down Expand Up @@ -948,16 +911,14 @@ class BucketSum(BucketResamplerBase):

def compute(self, data, skipna=True, **kwargs):
"""Call the resampling."""
kwargs = _get_arg_to_pass_for_skipna_handling(skipna=skipna, **kwargs)

results = []
if data.ndim == 3:
for i in range(data.shape[0]):
res = self.resampler.get_sum(data[i, :, :],
res = self.resampler.get_sum(data[i, :, :], skipna=skipna,
**kwargs)
results.append(res)
else:
res = self.resampler.get_sum(data, **kwargs)
res = self.resampler.get_sum(data, skipna=skipna, **kwargs)
results.append(res)

return da.stack(results)
Expand Down Expand Up @@ -1009,7 +970,7 @@ def compute(self, data, fill_value=np.nan, categories=None, **kwargs):
"nearest": KDTreeResampler,
"bilinear": BilinearResampler,
"native": NativeResampler,
"gradient_search": GradientSearchResampler,
"gradient_search": create_gradient_search_resampler,
"bucket_avg": BucketAvg,
"bucket_sum": BucketSum,
"bucket_count": BucketCount,
Expand Down
14 changes: 1 addition & 13 deletions satpy/tests/compositor_tests/test_spectral.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
import pytest
import xarray as xr

from satpy.composites.spectral import GreenCorrector, HybridGreen, NDVIHybridGreen, SpectralBlender
from satpy.composites.spectral import HybridGreen, NDVIHybridGreen, SpectralBlender
from satpy.tests.utils import CustomScheduler


Expand Down Expand Up @@ -67,18 +67,6 @@ def test_hybrid_green(self):
data = res.compute()
np.testing.assert_allclose(data, 0.23)

def test_green_corrector(self):
    """Test the deprecated class for green corrections."""
    compositor = GreenCorrector("blended_channel", fractions=(0.85, 0.15), prerequisites=(0.51, 0.85),
                                standard_name="toa_bidirectional_reflectance")
    result = compositor((self.c01, self.c03))
    # The composite must stay lazy (dask-backed DataArray) with the
    # requested metadata attached.
    assert isinstance(result, xr.DataArray)
    assert isinstance(result.data, da.Array)
    assert result.attrs["name"] == "blended_channel"
    assert result.attrs["standard_name"] == "toa_bidirectional_reflectance"
    # 0.85 * 0.2 + 0.15 * 0.4 == 0.23 for the class's default fractions.
    np.testing.assert_allclose(result.compute(), 0.23)


class TestNdviHybridGreenCompositor:
"""Test NDVI-weighted hybrid green correction of green band."""
Expand Down
34 changes: 34 additions & 0 deletions satpy/tests/test_composites.py
Original file line number Diff line number Diff line change
Expand Up @@ -1867,3 +1867,37 @@ def _create_fake_composite_config(yaml_filename: str):
},
comp_file,
)


class TestRealisticColors:
    """Test the SEVIRI Realistic Colors compositor."""

    def test_realistic_colors(self):
        """Test the compositor."""
        from satpy.composites import RealisticColors

        # Three float32 input bands; attrs verify metadata propagation.
        chan_vis06 = xr.DataArray(da.arange(0, 15, dtype=np.float32).reshape(3, 5), dims=("y", "x"),
                                  attrs={"foo": "foo"})
        chan_vis08 = xr.DataArray(da.arange(15, 0, -1, dtype=np.float32).reshape(3, 5), dims=("y", "x"),
                                  attrs={"bar": "bar"})
        chan_hrv = xr.DataArray(6 * da.ones((3, 5), dtype=np.float32), dims=("y", "x"),
                                attrs={"baz": "baz"})

        # At most one dask compute allowed: the composite must stay lazy.
        with dask.config.set(scheduler=CustomScheduler(max_computes=1)):
            compositor = RealisticColors("Ni!")
            composite = compositor((chan_vis06, chan_vis08, chan_hrv))

        values = composite.values

        exp_red = np.array([[0.0, 2.733333, 4.9333334, 6.6, 7.733333],
                            [8.333333, 8.400001, 7.9333334, 7.0, 6.0],
                            [5.0, 4.0, 3.0, 2.0, 1.0]], dtype=np.float32)
        exp_green = np.array([
            [15.0, 12.266666, 10.066668, 8.400001, 7.2666664],
            [6.6666665, 6.6000004, 7.0666666, 8.0, 9.0],
            [10.0, 11.0, 12.0, 13.0, 14.0]], dtype=np.float32)

        assert composite.dtype == np.float32
        np.testing.assert_allclose(values[0, :, :], exp_red)
        np.testing.assert_allclose(values[1, :, :], exp_green)
        np.testing.assert_allclose(values[2, :, :], 3.0)
Loading

0 comments on commit 91f96af

Please sign in to comment.