Commit

Merge branch 'pytroll:main' into mersi_fy3f
simonrp84 authored Jul 16, 2024
2 parents 3151495 + 3a742c2 commit cce88a4
Showing 23 changed files with 274 additions and 126 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/deploy-sdist.yaml
@@ -23,7 +23,7 @@ jobs:
- name: Publish package to PyPI
if: github.event.action == 'published'
uses: pypa/gh-action-pypi-publish@v1.8.14
uses: pypa/gh-action-pypi-publish@v1.9.0
with:
user: __token__
password: ${{ secrets.pypi_password }}
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -3,7 +3,7 @@ fail_fast: false
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: 'v0.4.7'
rev: 'v0.5.0'
hooks:
- id: ruff
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -14,12 +14,12 @@ repos:
- id: check-yaml
args: [--unsafe]
- repo: https://github.com/PyCQA/bandit
rev: '1.7.8' # Update me!
rev: '1.7.9' # Update me!
hooks:
- id: bandit
args: [--ini, .bandit]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: 'v1.10.0' # Use the sha / tag you want to point at
rev: 'v1.10.1' # Use the sha / tag you want to point at
hooks:
- id: mypy
additional_dependencies:
2 changes: 1 addition & 1 deletion continuous_integration/environment.yaml
@@ -60,6 +60,6 @@ dependencies:
- pip:
- pytest-lazy-fixtures
- trollsift
- trollimage>=1.23
- trollimage>=1.24
- pyspectral
- pyorbital
9 changes: 9 additions & 0 deletions doc/source/enhancements.rst
@@ -100,6 +100,15 @@ the example here::
- {colors: spectral, min_value: 193.15, max_value: 253.149999}
- {colors: greys, min_value: 253.15, max_value: 303.15}

In addition, it is also possible to add a linear alpha channel to the colormap, as in the
following example::

- name: colorize
method: !!python/name:satpy.enhancements.colorize
kwargs:
palettes:
- {colors: ylorrd, min_alpha: 100, max_alpha: 255}
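
The same alpha ramp can also be exercised directly from Python. The following is only a
rough sketch, using an explicit two-colour palette instead of the named ``ylorrd`` map,
with hypothetical value bounds; per the dependency bump in this commit it assumes
trollimage >= 1.24 for ``set_alpha_range``::

    from satpy.enhancements import create_colormap

    # Two RGB control points stretched over a hypothetical value range,
    # with a linear alpha ramp added via min_alpha/max_alpha.
    palette = {
        "colors": [[255, 255, 178], [189, 0, 38]],
        "color_scale": 255,
        "min_value": 193.15,
        "max_value": 303.15,
        "min_alpha": 100,
        "max_alpha": 255,
    }
    cmap = create_colormap(palette)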

It is also possible to provide your own custom defined color mapping by
specifying a list of RGB values and the corresponding min and max values
between which to apply the colors. This is for instance a common use case for
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -18,7 +18,7 @@ dependencies = [
"pyproj>=2.2",
"pyresample>=1.24.0",
"pyyaml>=5.1",
"trollimage>=1.23",
"trollimage>=1.24",
"trollsift",
"xarray>=0.14.1",
"zarr",
8 changes: 4 additions & 4 deletions satpy/composites/__init__.py
@@ -764,8 +764,8 @@ def _get_coszen_blending_weights(
self,
projectables: Sequence[xr.DataArray],
) -> xr.DataArray:
lim_low = np.cos(np.deg2rad(self.lim_low))
lim_high = np.cos(np.deg2rad(self.lim_high))
lim_low = float(np.cos(np.deg2rad(self.lim_low)))
lim_high = float(np.cos(np.deg2rad(self.lim_high)))
try:
coszen = np.cos(np.deg2rad(projectables[2 if self.day_night == "day_night" else 1]))
self._has_sza = True
@@ -775,8 +775,8 @@ def _get_coszen_blending_weights(
# Get chunking that matches the data
coszen = get_cos_sza(projectables[0])
# Calculate blending weights
coszen -= np.min((lim_high, lim_low))
coszen /= np.abs(lim_low - lim_high)
coszen -= min(lim_high, lim_low)
coszen /= abs(lim_low - lim_high)
return coszen.clip(0, 1)

def _get_data_for_single_side_product(
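For orientation, the weight computation in _get_coszen_blending_weights boils down to a
clipped linear ramp between the cosines of the two limit angles; a minimal standalone
sketch with hypothetical limit values:

    import numpy as np

    # Hypothetical day/night limits in degrees of solar zenith angle.
    lim_low, lim_high = 85.0, 88.0
    cos_low = float(np.cos(np.deg2rad(lim_low)))
    cos_high = float(np.cos(np.deg2rad(lim_high)))

    sza = np.array([80.0, 86.5, 95.0])            # per-pixel solar zenith angles
    coszen = np.cos(np.deg2rad(sza))
    weights = (coszen - min(cos_high, cos_low)) / abs(cos_low - cos_high)
    weights = weights.clip(0, 1)                  # 1 on the day side, 0 on the night side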
8 changes: 4 additions & 4 deletions satpy/composites/glm.py
@@ -50,19 +50,19 @@ def __init__(self, name, min_highlight=0.0, max_highlight=10.0, # noqa: D417
Args:
min_highlight (float): Minimum raw value of the "highlight" data
that will be used for linearly scaling the data along with
``max_hightlight``.
``max_highlight``.
max_highlight (float): Maximum raw value of the "highlight" data
that will be used for linearly scaling the data along with
``min_hightlight``.
``min_highlight``.
max_factor (tuple): Maximum effect that the highlight data can
have on each channel of the primary image data. This will be
multiplied by the linearly scaled highlight data and then
added or subtracted from the highlight channels. See class
docstring for more information. By default this is set to
``(0.8, 0.8, -0.8, 0)`` meaning the Red and Green channel
will be added to by at most 0.8, the Blue channel will be
subtracted from by at most 0.8, and the Alpha channel will
not be effected.
subtracted from by at most 0.8 (resulting in yellow highlights),
and the Alpha channel will not be affected.
"""
self.min_highlight = min_highlight
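To make the corrected docstring concrete: the highlight data is scaled linearly between
min_highlight and max_highlight and then multiplied by max_factor per channel. A small
illustrative sketch (not the compositor code itself; values are hypothetical):

    import numpy as np

    min_highlight, max_highlight = 0.0, 10.0
    max_factor = (0.8, 0.8, -0.8, 0.0)        # default: add to R/G, subtract from B, leave A

    highlight = np.array([0.0, 5.0, 10.0])    # hypothetical raw highlight values
    scaled = (highlight - min_highlight) / (max_highlight - min_highlight)
    offsets = [scaled * factor for factor in max_factor]
    # offsets[0] and offsets[1] brighten Red/Green by up to 0.8, offsets[2] darkens Blue
    # by up to 0.8 (yellow highlights), offsets[3] leaves the Alpha channel untouched.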
40 changes: 29 additions & 11 deletions satpy/enhancements/__init__.py
@@ -409,7 +409,7 @@ def create_colormap(palette, img=None): # noqa: D417
Colormaps can be loaded from lists of colors provided by the ``colors``
key in the provided dictionary. Each element in the list represents a
single color to be mapped to and can be 3 (RGB) or 4 (RGBA) elements long.
By default the value or control point for a color is determined by the
By default, the value or control point for a color is determined by the
index in the list (0, 1, 2, ...) divided by the total number of colors
to produce a number between 0 and 1. This can be overridden by providing a
``values`` key in the provided dictionary. See the "Set Range" section
@@ -455,12 +455,37 @@ def create_colormap(palette, img=None): # noqa: D417
``max_value``. See :meth:`trollimage.colormap.Colormap.set_range` for more
information.
**Set Alpha Range**
The alpha channel of a created colormap can be added and/or modified by
specifying ``min_alpha`` and ``max_alpha``.
See :meth:`trollimage.colormap.Colormap.set_alpha_range` for more info.
"""
# are colors between 0-255 or 0-1
color_scale = palette.get("color_scale", 255)
cmap = _get_cmap_from_palette_info(palette, img, color_scale)

if palette.get("reverse", False):
cmap.reverse()
if "min_value" in palette and "max_value" in palette:
cmap.set_range(palette["min_value"], palette["max_value"])
elif "min_value" in palette or "max_value" in palette:
raise ValueError("Both 'min_value' and 'max_value' must be specified (or neither).")

if "min_alpha" in palette and "max_alpha" in palette:
cmap.set_alpha_range(palette["min_alpha"] / color_scale,
palette["max_alpha"] / color_scale)
elif "min_alpha" in palette or "max_alpha" in palette:
raise ValueError("Both 'min_alpha' and 'max_alpha' must be specified (or neither).")

return cmap


def _get_cmap_from_palette_info(palette, img, color_scale):
fname = palette.get("filename", None)
colors = palette.get("colors", None)
dataset = palette.get("dataset", None)
# are colors between 0-255 or 0-1
color_scale = palette.get("color_scale", 255)
if fname:
if not os.path.exists(fname):
fname = get_config_path(fname)
@@ -473,17 +498,10 @@ def create_colormap(palette, img=None): # noqa: D417
cmap = _create_colormap_from_dataset(img, dataset, color_scale)
else:
raise ValueError("Unknown colormap format: {}".format(palette))

if palette.get("reverse", False):
cmap.reverse()
if "min_value" in palette and "max_value" in palette:
cmap.set_range(palette["min_value"], palette["max_value"])
elif "min_value" in palette or "max_value" in palette:
raise ValueError("Both 'min_value' and 'max_value' must be specified (or neither)")

return cmap



def _create_colormap_from_dataset(img, dataset, color_scale):
"""Create a colormap from an auxiliary variable in a source file."""
match = find_in_ancillary(img.data, dataset)
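The refactor also tightens the palette validation: value bounds and alpha bounds must each
be given as a pair. A hedged usage sketch with hypothetical palettes:

    from satpy.enhancements import create_colormap

    colors = [[0, 0, 0], [255, 255, 255]]

    # Both value bounds supplied: the colormap is stretched to 0-100.
    create_colormap({"colors": colors, "min_value": 0, "max_value": 100})

    # Only one alpha bound supplied: the new check raises a ValueError.
    try:
        create_colormap({"colors": colors, "min_alpha": 100})
    except ValueError as err:
        print(err)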
13 changes: 11 additions & 2 deletions satpy/readers/sar_c_safe.py
@@ -35,6 +35,7 @@
"""

import functools
import json
import logging
import warnings
from collections import defaultdict
@@ -664,7 +665,7 @@ def get_gcps(self):
gcp_coords (tuple): longitude and latitude 1d arrays
"""
gcps = self._data.coords["spatial_ref"].attrs["gcps"]
gcps = get_gcps_from_array(self._data)
crs = self._data.rio.crs

gcp_list = [(feature["properties"]["row"], feature["properties"]["col"], *feature["geometry"]["coordinates"])
@@ -726,7 +727,7 @@ def load(self, dataset_keys, **kwargs):
if key["name"] not in ["longitude", "latitude"]:
lonlats = self.load([DataID(self._id_keys, name="longitude", polarization=key["polarization"]),
DataID(self._id_keys, name="latitude", polarization=key["polarization"])])
gcps = val.coords["spatial_ref"].attrs["gcps"]
gcps = get_gcps_from_array(val)
from pyresample.future.geometry import SwathDefinition
val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"],
attrs=dict(gcps=gcps))
@@ -797,3 +798,11 @@ def _create_measurement_handlers(self, calibrators, denoisers):
filetype_info=None)

return measurement_handlers


def get_gcps_from_array(val):
"""Get the gcps from the spatial_ref coordinate as a geojson dict."""
gcps = val.coords["spatial_ref"].attrs["gcps"]
if isinstance(gcps, str):
gcps = json.loads(gcps)
return gcps
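
A short sketch of the normalization the new helper performs: the "gcps" attribute on the
spatial_ref coordinate may arrive either as a GeoJSON dict or as its JSON-encoded string
(the value below is a minimal hypothetical example):

    import json

    raw = '{"type": "FeatureCollection", "features": []}'   # hypothetical attribute value
    gcps = json.loads(raw) if isinstance(raw, str) else raw
    assert gcps["type"] == "FeatureCollection"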
2 changes: 1 addition & 1 deletion satpy/readers/vaisala_gld360.py
@@ -54,7 +54,7 @@ def __init__(self, filename, filename_info, filetype_info):
# Combine 'date' and 'time' into a datetime object
parse_dates = {"time": ["gld360_date", "gld360_time"]}

self.data = pd.read_csv(filename, delim_whitespace=True, header=None,
self.data = pd.read_csv(filename, sep="\\s+", header=None,
names=names, dtype=dtypes, parse_dates=parse_dates)

@property
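The one-line reader change swaps the delim_whitespace flag, deprecated in recent pandas
releases, for an equivalent regex separator. A minimal standalone sketch with hypothetical
two-column data:

    import io
    import pandas as pd

    sample = io.StringIO("20240716 120000\n20240716 120001\n")
    df = pd.read_csv(sample, sep=r"\s+", header=None,
                     names=["gld360_date", "gld360_time"])
    print(df)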
94 changes: 60 additions & 34 deletions satpy/readers/viirs_edr.py
@@ -128,18 +128,13 @@ def get_dataset(self, dataset_id: DataID, info: dict) -> xr.DataArray:
"""Get the dataset."""
data_arr = self.nc[info["file_key"]]
data_arr = self._mask_invalid(data_arr, info)
data_arr = self._sanitize_metadata(data_arr, info)
units = info.get("units", data_arr.attrs.get("units"))
if units is None or units == "unitless":
units = "1"
if units == "%" and data_arr.attrs.get("units") in ("1", "unitless"):
data_arr *= 100.0 # turn into percentages
data_arr.attrs["units"] = units
if "standard_name" in info:
data_arr.attrs["standard_name"] = info["standard_name"]
self._decode_flag_meanings(data_arr)
data_arr.attrs["platform_name"] = self.platform_name
data_arr.attrs["sensor"] = self.sensor_name
data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr)
if data_arr.attrs.get("standard_name") in ("longitude", "latitude"):
# recursive swath definitions are a problem for the base reader right now
# delete the coordinates here so the base reader doesn't try to
@@ -157,6 +152,18 @@ def _mask_invalid(self, data_arr: xr.DataArray, ds_info: dict) -> xr.DataArray:
return data_arr.where((valid_range[0] <= data_arr) & (data_arr <= valid_range[1]))
return data_arr

def _sanitize_metadata(self, data_arr: xr.DataArray, info: dict) -> xr.DataArray:
if "valid_range" in data_arr.attrs:
# don't use numpy arrays for simple metadata
data_arr.attrs["valid_range"] = tuple(data_arr.attrs["valid_range"])
if "standard_name" in info:
data_arr.attrs["standard_name"] = info["standard_name"]
self._decode_flag_meanings(data_arr)
data_arr.attrs["platform_name"] = self.platform_name
data_arr.attrs["sensor"] = self.sensor_name
data_arr.attrs["rows_per_scan"] = self.rows_per_scans(data_arr)
return data_arr

@staticmethod
def _decode_flag_meanings(data_arr: xr.DataArray):
flag_meanings = data_arr.attrs.get("flag_meanings", None)
@@ -243,46 +250,65 @@ def available_datasets(self, configured_datasets=None):
yield from self._dynamic_variables_from_file(handled_var_names)

def _dynamic_variables_from_file(self, handled_var_names: set) -> Iterable[tuple[bool, dict]]:
ftype = self.filetype_info["file_type"]
m_lon_name = f"longitude_{ftype}"
m_lat_name = f"latitude_{ftype}"
m_coords = (m_lon_name, m_lat_name)
i_lon_name = f"longitude_i_{ftype}"
i_lat_name = f"latitude_i_{ftype}"
i_coords = (i_lon_name, i_lat_name)
for var_name in self.nc.variables.keys():
data_arr = self.nc[var_name]
is_lon = "longitude" in var_name.lower()
is_lat = "latitude" in var_name.lower()
if var_name in handled_var_names and not (is_lon or is_lat):
# skip variables that YAML had configured, but allow lon/lats
# to be reprocessed due to our dynamic coordinate naming
coords: dict[str, dict] = {}
for is_avail, ds_info in self._generate_dynamic_metadata(self.nc.variables.keys(), coords):
var_name = ds_info["file_key"]
if var_name in handled_var_names and not ("longitude_" in var_name or "latitude_" in var_name):
continue
handled_var_names.add(var_name)
yield is_avail, ds_info

for coord_info in coords.values():
yield True, coord_info

def _generate_dynamic_metadata(self, variable_names: Iterable[str], coords: dict) -> Iterable[tuple[bool, dict]]:
for var_name in variable_names:
data_arr = self.nc[var_name]
if data_arr.ndim != 2:
# only 2D arrays supported at this time
continue
res = 750 if data_arr.shape[1] == M_COLS else 375
ds_info = {
"file_key": var_name,
"file_type": ftype,
"file_type": self.filetype_info["file_type"],
"name": var_name,
"resolution": res,
"coordinates": m_coords if res == 750 else i_coords,
"coordinates": self._coord_names_for_resolution(res),
}
if is_lon:
ds_info["standard_name"] = "longitude"
ds_info["units"] = "degrees_east"
ds_info["name"] = m_lon_name if res == 750 else i_lon_name
# recursive coordinate/SwathDefinitions are not currently handled well in the base reader
del ds_info["coordinates"]
elif is_lat:
ds_info["standard_name"] = "latitude"
ds_info["units"] = "degrees_north"
ds_info["name"] = m_lat_name if res == 750 else i_lat_name
# recursive coordinate/SwathDefinitions are not currently handled well in the base reader
del ds_info["coordinates"]

is_lon = "longitude" in var_name.lower()
is_lat = "latitude" in var_name.lower()
if not (is_lon or is_lat):
yield True, ds_info
continue

ds_info["standard_name"] = "longitude" if is_lon else "latitude"
ds_info["units"] = "degrees_east" if is_lon else "degrees_north"
# recursive coordinate/SwathDefinitions are not currently handled well in the base reader
del ds_info["coordinates"]
yield True, ds_info

# "standard" geolocation coordinate (assume shorter variable name is "better")
new_name = self._coord_names_for_resolution(res)[int(not is_lon)]
if new_name not in coords or len(var_name) < len(coords[new_name]["file_key"]):
ds_info = ds_info.copy()
ds_info["name"] = new_name
coords[ds_info["name"]] = ds_info

def _coord_names_for_resolution(self, res: int):
ftype = self.filetype_info["file_type"]
m_lon_name = f"longitude_{ftype}"
m_lat_name = f"latitude_{ftype}"
m_coords = (m_lon_name, m_lat_name)
i_lon_name = f"longitude_i_{ftype}"
i_lat_name = f"latitude_i_{ftype}"
i_coords = (i_lon_name, i_lat_name)
if res == 750:
return m_coords
else:
return i_coords
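
As a quick illustration of the dynamic coordinate naming above (the file type name below is
hypothetical), 750 m variables are pointed at the M-band longitude/latitude names and
everything else at the I-band variants:

    def coord_names_for_resolution(ftype: str, res: int) -> tuple:
        """Standalone mirror of the method above, for illustration only."""
        if res == 750:
            return (f"longitude_{ftype}", f"latitude_{ftype}")
        return (f"longitude_i_{ftype}", f"latitude_i_{ftype}")

    print(coord_names_for_resolution("jrr_cloudmask", 750))
    # -> ('longitude_jrr_cloudmask', 'latitude_jrr_cloudmask')
    print(coord_names_for_resolution("jrr_cloudmask", 375))
    # -> ('longitude_i_jrr_cloudmask', 'latitude_i_jrr_cloudmask')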



class VIIRSSurfaceReflectanceWithVIHandler(VIIRSJRRFileHandler):
"""File handler for surface reflectance files with optional vegetation indexes."""
