Cherry picks from 1.8 #1040

Merged: 10 commits, Aug 1, 2024
2 changes: 1 addition & 1 deletion Dockerfile
@@ -1,5 +1,5 @@
# Note that this is now pinned to a fixed version. Remember to check for new versions periodically.
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.8.5 as builder
FROM ghcr.io/osgeo/gdal:ubuntu-small-3.8.5 AS builder

# Setup build env for postgresql-client-14
USER root
15 changes: 15 additions & 0 deletions HISTORY.rst
@@ -7,6 +7,21 @@ History

Datacube-ows version 1.8.x indicates that it is designed to work with datacube-core versions 1.8.x.

1.8.41 (2024-07-16)
-------------------

New Feature! Multi-date handler aggregator functions for colour-ramp type styles can now receive
either the results of the index function or the raw band data, selected by a config option. (Previously
they always received the results of the index function.) See the config sketch after this entry.

* Improved error messages when creating extents without materialised views (#1016)
* Several minor bug-fixes and improved error handling in WCS code (#1027)
* Automated updates (#1022)
* Allow multi-date handler aggregator functions to receive raw data (#1033)
* Update HISTORY.rst and increment default version for release (#1034)

This release includes contributions from @SpacemanPaul and @whatnick
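
A hedged sketch of how the new option might be wired into an OWS style config: "pass_raw_data" is the
option added in this release, while the surrounding key names ("aggregator_function", "allowed_count_range",
"needed_bands", "range") follow the usual multi-date colour-ramp configuration and the band names are only
illustrative.

example_style = {
    "name": "raw_delta_example",
    "title": "Raw multi-date delta (example)",
    "needed_bands": ["red", "nir"],
    "index_function": {
        "function": "datacube_ows.band_utils.norm_diff",
        "kwargs": {"band1": "nir", "band2": "red"},
    },
    "mpl_ramp": "RdYlGn",
    "range": [-1.0, 1.0],
    "multi_date": [
        {
            "allowed_count_range": [2, 2],
            # New in 1.8.41: hand the aggregator raw band data instead of index results.
            "pass_raw_data": True,
            "aggregator_function": {
                "function": "datacube_ows.band_utils.multi_date_raw_example",
                "kwargs": {"band1": "nir", "band2": "red"},
            },
        },
    ],
}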

1.8.40 (2024-04-29)
-------------------

12 changes: 12 additions & 0 deletions README.rst
@@ -41,6 +41,18 @@ System Architecture
.. image:: docs/diagrams/ows_diagram1.9.png
:width: 700

Known CRS Limitations
---------------------

1. ODC datasets with WKT-format CRSs will not work with OWS - data from such datasets
   will never be displayed. OWS currently only works with CRSs that have EPSG codes.

2. Datasets that straddle the anti-meridian or the north or south polar region will
   cause issues with the legacy postgres driver.

These are fundamental limitations of the way OWS works with the postgres ODC index driver.
They will be addressed in v1.9.0, but only for the new ODC postgis index driver.

Community
---------

2 changes: 1 addition & 1 deletion datacube_ows/__init__.py
@@ -7,4 +7,4 @@
try:
from ._version import version as __version__
except ImportError:
__version__ = "1.8.40?"
__version__ = "1.8.41?"
11 changes: 11 additions & 0 deletions datacube_ows/band_utils.py
@@ -189,6 +189,17 @@ def multi_date_delta(data, time_direction=-1):
def multi_date_pass(data):
return data

def multi_date_raw_example(data, band1, band2, band_mapper=None):
    """
    Example multi-date aggregator that works on raw band data (i.e. for use with the
    pass_raw_data option): the change in band1 minus the change in band2 between two
    dates, normalised by the sum of both bands over both dates.
    """
    if band_mapper:
        band1 = band_mapper(band1)
        band2 = band_mapper(band2)
    data1, data2 = (data.sel(time=dt) for dt in data.coords["time"].values)
    return (
        (data2[band1] - data1[band1]) - (data2[band2] - data1[band2])
    ) / (
        data1[band1] + data1[band2] + data2[band1] + data2[band2]
    )


@band_modulator
@scalable
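
A minimal sketch of exercising the new multi_date_raw_example above directly on a tiny synthetic
two-date dataset (band names arbitrary, datacube-ows assumed importable):

import numpy as np
import xarray as xr

from datacube_ows.band_utils import multi_date_raw_example

times = np.array(["2024-01-01", "2024-02-01"], dtype="datetime64[ns]")
data = xr.Dataset(
    {
        "red": (("time", "y", "x"), np.array([[[0.2]], [[0.4]]])),
        "nir": (("time", "y", "x"), np.array([[[0.6]], [[0.5]]])),
    },
    coords={"time": times, "y": [0.0], "x": [0.0]},
)
# Change in "red" minus change in "nir", normalised by the band totals:
# ((0.4 - 0.2) - (0.5 - 0.6)) / (0.2 + 0.6 + 0.4 + 0.5) ≈ 0.18
result = multi_date_raw_example(data, "red", "nir")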
4 changes: 2 additions & 2 deletions datacube_ows/config_utils.py
@@ -9,7 +9,7 @@
import os
from importlib import import_module
from itertools import chain
from typing import Any, Callable, Iterable, Optional, Sequence, TypeVar, cast
from typing import Any, Callable, Iterable, Optional, Sequence, cast
from urllib.parse import urlparse

import fsspec
@@ -34,7 +34,7 @@

CFG_DICT = dict[str, RAW_CFG]

F = TypeVar('F', bound=Callable[..., Any])
F = Callable[..., Any]


# inclusions defaulting to an empty list is dangerous, but note that it is never modified.
36 changes: 34 additions & 2 deletions datacube_ows/feature_info.py
@@ -227,12 +227,44 @@ def feature_info(args: dict[str, str]) -> FlaskResponse:
derived_band_dict = cast(RAW_CFG, _make_derived_band_dict(pixel_ds, params.layer.style_index))
if derived_band_dict:
date_info["band_derived"] = derived_band_dict
# Add any custom-defined fields.
for k, f in params.layer.feature_info_custom_includes.items():
# Add any legacy custom-defined fields.
for k, f in params.layer.legacy_feature_info_custom_includes.items():
# legacy function signature: pass in band and index data as a dictionary
date_info[k] = f(date_info["bands"])

# Any custom-defined layer fields
# (entries from the legacy directive are overwritten by the new one if both exist)
for k, f in params.layer.feature_info_custom_includes.items():
# New function signature: pass in:
# * a single pixel (1x1x1) multiband xarray Dataset, and
# * the ODC Dataset model (i.e. full ODC metadata)
date_info[k] = f(pixel_ds, ds)

if params.style is not None:
# Any custom-defined style fields
# (style definitions override layer-level entries where both exist)
for k, f in params.style.feature_info_includes.items():
# Function signature: pass in:
# * a single pixel (1x1x1) multiband xarray Dataset, and
# * the ODC Dataset model (i.e. full ODC metadata)
date_info[k] = f(pixel_ds, ds)

cast(list[RAW_CFG], feature_json["data"]).append(date_info)
fi_date_index[dt] = cast(dict[str, list[RAW_CFG]], feature_json)["data"][-1]
# Multidate custom includes reflect all selected times on multidate requests,
# and are added as an extra all-time data record after the date ones.
times = list(data.time.values)
if len(times) > 1 and params.style is not None:
mdh = params.style.get_multi_date_handler(times)
if mdh is not None:
date_info = {
"time": "all"
}
for k, f in mdh.feature_info_includes.items():
# Function signature: pass in:
# * a multi-date single pixel (1x1xn) multiband xarray Dataset,
date_info[k] = f(data)
cast(list[RAW_CFG], feature_json["data"]).append(date_info)
feature_json["data_available_for_dates"] = []
pt_native = None
for d in all_time_datasets.coords["time"].values:
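
A hedged sketch of a layer-level custom include under the new signature used above: the wrapped
function receives the single-pixel (1x1x1) multiband xarray Dataset plus the ODC Dataset model, and
its return value is placed in the GetFeatureInfo response under the configured key. The function
name, band names and module path are hypothetical.

def collection_and_ndvi(pixel_ds, odc_dataset):
    # pixel_ds is a 1x1x1 multiband xarray Dataset; odc_dataset is the full ODC metadata model.
    red = float(pixel_ds["red"].item())
    nir = float(pixel_ds["nir"].item())
    return {
        "product": odc_dataset.type.name,  # product name from the ODC metadata (assumed attribute)
        "ndvi": (nir - red) / (nir + red) if (nir + red) else None,
    }

# Referenced from the layer's feature_info section, e.g.:
# "feature_info": {
#     "custom_includes": {
#         "extra_info": {"function": "mymodule.fi_includes.collection_and_ndvi"},
#     },
# },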
21 changes: 15 additions & 6 deletions datacube_ows/ows_configuration.py
@@ -34,7 +34,7 @@
from slugify import slugify

from datacube_ows.config_utils import (CFG_DICT, RAW_CFG, ConfigException,
FlagProductBands, FunctionWrapper,
F, FlagProductBands, FunctionWrapper,
ODCInitException, OWSConfigEntry,
OWSEntryNotFound, OWSExtensibleConfigEntry,
OWSFlagBand, OWSMetadataConfig,
@@ -705,9 +705,13 @@ def ready_image_processing(self) -> None:
def parse_feature_info(self, cfg: CFG_DICT):
self.feature_info_include_utc_dates = bool(cfg.get("include_utc_dates", False))
custom = cast(dict[str, CFG_DICT | str], cfg.get("include_custom", {}))
self.feature_info_custom_includes = {
k: FunctionWrapper(self, v) for k, v in custom.items() # type:ignore[type-var]
}
self.legacy_feature_info_custom_includes = {k: FunctionWrapper(self, v) for k, v in custom.items()}
if self.legacy_feature_info_custom_includes:
_LOG.warning("In layer %s: The 'include_custom' directive is deprecated and will be removed in version 1.9. "
"Please refer to the documentation for information on how to migrate your configuration "
"to the new 'custom_includes' directive.", self.name)
custom = cast(dict[str, CFG_DICT | str | F], cfg.get("custom_includes", {}))
self.feature_info_custom_includes = {k: FunctionWrapper(self, v) for k, v in custom.items()}

# pylint: disable=attribute-defined-outside-init
def parse_flags(self, cfg: CFG_DICT):
@@ -1513,13 +1517,18 @@ def parse_layers(self, cfg: list[CFG_DICT]):
self.declare_unready("native_product_index")
self.root_layer_folder = OWSFolder(cast(CFG_DICT, {
"title": "Root Folder (hidden)",
"label": "ows_root_hidden",
"abstract": self.abstract,
"label": "ows_root",
"layers": cfg
}), global_cfg=self, parent_layer=None)

@property
def layers(self) -> list[OWSLayer]:
return self.root_layer_folder.child_layers
# Multiple top-level layers are not consistent with a strict reading of the OWS standard.
# If we have multiple top-level folders, wrap them in an auto-generated top-level folder.
if len(self.root_layer_folder.child_layers) == 1:
return self.root_layer_folder.child_layers
return [self.root_layer_folder]

def alias_bboxes(self, bboxes: CFG_DICT) -> CFG_DICT:
out: CFG_DICT = {}
@@ -28,6 +28,7 @@ eo3_ranges as
from agdc.dataset where
metadata_type_ref in (select id from metadata_lookup where name='eo3')
and archived is null
and upper(substr(metadata #>> '{crs}', 1, 5)) = 'EPSG:'
),
-- This is eo spatial (Uses ALOS-PALSAR over Africa as a sample product)
eo_corners as
@@ -45,7 +46,7 @@ eo_corners as
and archived is null
and (metadata #>> '{grid_spatial, projection, valid_data}' is null
or
substr(metadata #>> '{grid_spatial, projection, spatial_reference}', 1, 4) <> 'EPSG'
upper(substr(metadata #>> '{grid_spatial, projection, spatial_reference}', 1, 5)) <> 'EPSG:'
)
),
eo_geoms as
@@ -62,8 +63,8 @@ eo_geoms as
from agdc.dataset where
metadata_type_ref in (select id from metadata_lookup where name in ('eo','eo_s2_nrt','gqa_eo','eo_plus', 'boku'))
and archived is null
and metadata #>> '{grid_spatial, projection, valid_data}' is not null
and substr(metadata #>> '{grid_spatial, projection, spatial_reference}', 1, 5) = 'EPSG:'
and metadata #>> '{grid_spatial, projection, valid_data}' is not null
and upper(substr(metadata #>> '{grid_spatial, projection, spatial_reference}', 1, 5)) = 'EPSG:'
)
select id,format('POLYGON(( %s %s, %s %s, %s %s, %s %s, %s %s))',
lon_begin, lat_begin, lon_end, lat_begin, lon_end, lat_end,
@@ -97,4 +98,5 @@ select id,
) as spatial_extent
from agdc.dataset where
metadata_type_ref in (select id from metadata_lookup where name like 'eo3_%')
and upper(substr(metadata #>> '{grid_spatial, projection, spatial_reference}', 1, 5)) = 'EPSG:'
and archived is null
12 changes: 11 additions & 1 deletion datacube_ows/styles/base.py
@@ -17,7 +17,7 @@

import datacube_ows.band_utils
from datacube_ows.config_utils import (CFG_DICT, RAW_CFG, AbstractMaskRule,
ConfigException, FlagBand,
ConfigException, FlagBand, F,
FlagProductBands, FunctionWrapper,
OWSConfigEntry, OWSEntryNotFound,
OWSExtensibleConfigEntry,
@@ -204,6 +204,11 @@ def __init__(self, product: "datacube_ows.ows_configuration.OWSNamedLayer",
self.declare_unready("needed_bands")
self.declare_unready("flag_bands")

custom_includes = cast(dict[str, CFG_DICT | str | F], style_cfg.get("custom_includes", {}))
self.feature_info_includes = {
k: FunctionWrapper(self, v)
for k, v in custom_includes.items()
}
self.legend_cfg = self.Legend(self, cast(CFG_DICT, raw_cfg.get("legend", {})))
if not defer_multi_date:
self.parse_multi_date(raw_cfg)
@@ -535,6 +540,11 @@ def __init__(self, style: "StyleDefBase", cfg: CFG_DICT) -> None:
raise ConfigException("Aggregator function is required for non-animated multi-date handlers.")
self.legend_cfg = self.Legend(self, cast(CFG_DICT, raw_cfg.get("legend", {})))
self.preserve_user_date_order = cast(bool, cfg.get("preserve_user_date_order", False))
custom_includes = cast(dict[str, CFG_DICT | str | F], cfg.get("custom_includes", {}))
self.feature_info_includes = {
k: FunctionWrapper(self.style, v)
for k, v in custom_includes.items()
}

def applies_to(self, count: int) -> bool:
"""Does this multidate handler apply to a request with this number of dates?"""
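
A hedged sketch of the style-level and multi-date "custom_includes" blocks parsed above: each entry
is wrapped in a FunctionWrapper, single-date entries are called as f(pixel_ds, odc_dataset) and
multi-date entries as f(data). The module paths, aggregator and count-range keys are assumptions,
not taken from this diff.

style_fragment = {
    # Style-level entries: each function is called as f(pixel_ds, odc_dataset).
    "custom_includes": {
        "cloud_pct": "mymodule.fi_includes.cloud_percentage",
    },
    "multi_date": [
        {
            "allowed_count_range": [2, 10],
            "aggregator_function": "datacube_ows.band_utils.multi_date_pass",
            # Multi-date entries: each function is called as f(data) over all selected times.
            "custom_includes": {
                "trend": "mymodule.fi_includes.pixel_trend",
            },
        },
    ],
}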
11 changes: 9 additions & 2 deletions datacube_ows/styles/ramp.py
@@ -575,9 +575,11 @@ def __init__(self, style: "ColorRampDef", cfg: CFG_DICT) -> None:
if self.animate:
self.feature_info_label: str | None = None
self.color_ramp = style.color_ramp
self.pass_raw_data = False
else:
self.feature_info_label = cast(str | None, cfg.get("feature_info_label", None))
self.color_ramp = ColorRamp(style, cfg, cast(ColorRampDef.Legend, self.legend_cfg))
self.pass_raw_data = bool(cfg.get("pass_raw_data", False))

def transform_data(self, data: Dataset) -> Dataset:
"""
@@ -586,9 +588,14 @@ def transform_data(self, data: Dataset) -> Dataset:
:param data: Raw data
:return: RGBA image xarray. May have a time dimension
"""
xformed_data = cast("ColorRampDef", self.style).apply_index(data)
agg = cast(FunctionWrapper, self.aggregator)(xformed_data)
if self.pass_raw_data:
assert self.aggregator is not None # For type-checker
agg = self.aggregator(data)
else:
xformed_data = cast("ColorRampDef", self.style).apply_index(data)
agg = cast(FunctionWrapper, self.aggregator)(xformed_data)
return self.color_ramp.apply(agg)

class Legend(RampLegendBase):
pass

4 changes: 1 addition & 3 deletions datacube_ows/templates/wms_capabilities.xml
@@ -89,9 +89,7 @@
{% elif lyr.regular_time_axis %}
<Dimension name="time" units="ISO8601" default="{{ lyr.default_time.isoformat() }}">{{ lyr.time_axis_representation() }}</Dimension>
{% elif lyr_ranges.times|length > 1 %}
<Dimension name="time" units="ISO8601" default="{{ lyr.default_time.isoformat() }}">
{% for t in lyr_ranges.times %}{{ t.isoformat() }}{% if not loop.last %},{% endif %}{% endfor %}
</Dimension>
<Dimension name="time" units="ISO8601" default="{{ lyr.default_time.isoformat() }}">{% for t in lyr_ranges.times %}{{ t.isoformat() }}{% if not loop.last %},{% endif %}{% endfor %}</Dimension>
{% endif %}

{% if lyr.attribution %}
19 changes: 10 additions & 9 deletions datacube_ows/wms_utils.py
@@ -324,9 +324,9 @@ def get_layer(self, args) -> OWSNamedLayer:
return get_layer_from_arg(args)


def single_style_from_args(product, args, required=True):
def single_style_from_args(layer, args, required=True):
# User Band Math (overrides style if present).
if product.user_band_math and "code" in args and "colorscheme" in args:
if layer.user_band_math and "code" in args and "colorscheme" in args:
code = args["code"]
mpl_ramp = args["colorscheme"]
try:
@@ -347,7 +347,7 @@ def single_style_from_args(product, args, required=True):
raise WMSException(f"Colorscale range must be two numbers, sorted and separated by a comma.",
locator="Colorscalerange parameter")
try:
style = StyleDef(product, {
style = StyleDef(layer, {
"name": "custom_user_style",
"index_expression": code,
"mpl_ramp": mpl_ramp,
@@ -374,28 +374,28 @@ def single_style_from_args(product, args, required=True):
if not style_r and not required:
return None
if not style_r:
style_r = product.default_style.name
style = product.style_index.get(style_r)
style_r = layer.default_style.name
style = layer.style_index.get(style_r)
if not style:
raise WMSException("Style %s is not defined" % style_r,
WMSException.STYLE_NOT_DEFINED,
locator="Style parameter",
valid_keys=list(product.style_index))
valid_keys=list(layer.style_index))
return style

class GetLegendGraphicParameters():
def __init__(self, args):
self.product = get_layer_from_arg(args, 'layer')
self.layer = get_layer_from_arg(args, 'layer')

# Validate Format parameter
self.format = get_arg(args, "format", "image format",
errcode=WMSException.INVALID_FORMAT,
lower=True,
permitted_values=["image/png"])
self.style = single_style_from_args(self.product, args)
self.style = single_style_from_args(self.layer, args)
self.styles = [self.style]
# Time parameter
self.times = get_times(args, self.product)
self.times = get_times(args, self.layer)


class GetMapParameters(GetParameters):
@@ -456,6 +456,7 @@ def method_specific_init(self, args):
"%s parameter" % coords[0])
self.i = int(i)
self.j = int(j)
self.style = single_style_from_args(self.layer, args, required=False)


# Solar angle correction functions
7 changes: 7 additions & 0 deletions datacube_ows/wsgi.py
@@ -15,12 +15,19 @@
# The location of the datacube config file.
os.environ.setdefault("DATACUBE_CONFIG_PATH", "/opt/odc/.datacube.conf.local")

from datacube_ows import __version__

from datacube_ows.ogc import app # isort:skip

application = app


def main():
if "--version" in sys.argv:
print("Open Data Cube Open Web Services (datacube-ows) version",
__version__
)
exit(0)
app.run()

