diff --git a/CHANGES.md b/CHANGES.md
index 93e2bf3be..d26b78969 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -5,6 +5,11 @@
that comes with, e.g., several bug fixes including issues with the time series legend,
the zoom information box, persisted states, and the progress bar, as well as updated
dependencies such as `chartlets ^0.2.0`.
+* WebAPI now supports statistics and time series for dataset variables with
+ additional non-spatial dimensions beyond `time`, `lat`, and `lon` (e.g. `depth`).
+ Dimension values can be passed as query parameters; if omitted, a value is selected
+ automatically. As part of this, `xcube.core.tile.get_non_spatial_labels` has been
+ made a public function. (#1213)
### Fixes
* Adapted to xarray version 2026.04.0 (#1211)
diff --git a/test/webapi/datasets/test_controllers.py b/test/webapi/datasets/test_controllers.py
index 3cfb8a22f..9d5381b63 100644
--- a/test/webapi/datasets/test_controllers.py
+++ b/test/webapi/datasets/test_controllers.py
@@ -42,7 +42,7 @@ def assertDatasetsOk(self, response: Any, expected_count: Optional[int] = None):
class DatasetsControllerTest(DatasetsControllerTestBase):
def test_datasets(self):
response = get_datasets(get_datasets_ctx())
- datasets = self.assertDatasetsOk(response, expected_count=2)
+ datasets = self.assertDatasetsOk(response, expected_count=3)
for dataset in datasets:
self.assertIsInstance(dataset, dict)
self.assertIn("id", dataset)
@@ -55,7 +55,7 @@ def test_dataset_with_details(self):
response = get_datasets(
get_datasets_ctx(), details=True, base_url="http://test"
)
- datasets = self.assertDatasetsOk(response, expected_count=2)
+ datasets = self.assertDatasetsOk(response, expected_count=3)
demo_dataset = None
demo_1w_dataset = None
@@ -97,7 +97,7 @@ def test_dataset_with_point(self):
response = get_datasets(
get_datasets_ctx(), point=(1.7, 51.2), base_url="http://test"
)
- datasets = self.assertDatasetsOk(response, expected_count=2)
+ datasets = self.assertDatasetsOk(response, expected_count=3)
dataset = datasets[0]
self.assertIsInstance(dataset, dict)
self.assertIn("id", dataset)
@@ -114,7 +114,7 @@ def test_dataset_with_point_and_details(self):
response = get_datasets(
get_datasets_ctx(), point=(1.7, 51.2), details=True, base_url="http://test"
)
- datasets = self.assertDatasetsOk(response, expected_count=2)
+ datasets = self.assertDatasetsOk(response, expected_count=3)
dataset = datasets[0]
self.assertIsInstance(dataset, dict)
self.assertIn("id", dataset)
diff --git a/test/webapi/ows/stac/test_controllers.py b/test/webapi/ows/stac/test_controllers.py
index 81dad4b53..dc1d57592 100644
--- a/test/webapi/ows/stac/test_controllers.py
+++ b/test/webapi/ows/stac/test_controllers.py
@@ -136,7 +136,7 @@
"title": f'Feature for the dataset "{dsid}"',
"type": "application/geo+json",
}
- for dsid in ["demo", "demo-1w"]
+ for dsid in ["demo", "demo-1w", "demo-multidimensional"]
],
"providers": [],
"stac_version": STAC_VERSION,
@@ -234,7 +234,7 @@ def test_get_datasets_collection_items(self):
features = result["features"]
self.assertIsInstance(features, list)
- self.assertEqual(2, len(features))
+ self.assertEqual(3, len(features))
for feature in features:
self.assertIsInstance(feature, dict)
@@ -245,7 +245,7 @@ def test_get_datasets_collection_items(self):
self.assertIsInstance(feature.get("geometry"), dict)
self.assertIsInstance(feature.get("assets"), dict)
self.assertIsInstance(feature.get("id"), str)
- self.assertIn(feature["id"], {"demo", "demo-1w"})
+            self.assertIn(feature["id"], {"demo", "demo-1w", "demo-multidimensional"})
# TODO (forman): add more assertions
# import pprint
# pprint.pprint(feature)
@@ -300,7 +300,7 @@ def test_get_collections(self):
)
self.assertEqual(EXPECTED_DATASETS_COLLECTION, result["collections"][0])
self.assertEqual(
- ["datacubes", "demo", "demo-1w"],
+ ["datacubes", "demo", "demo-1w", "demo-multidimensional"],
[collection["id"] for collection in result["collections"]],
)
diff --git a/test/webapi/ows/wmts/res/WMTSCapabilities-CRS84.xml b/test/webapi/ows/wmts/res/WMTSCapabilities-CRS84.xml
index 918ac2e08..e27e83a62 100644
--- a/test/webapi/ows/wmts/res/WMTSCapabilities-CRS84.xml
+++ b/test/webapi/ows/wmts/res/WMTSCapabilities-CRS84.xml
@@ -493,6 +493,45 @@
2017-02-05T00:00:00.000000000
+
+ demo-multidimensional.conc_chl
+ demo-multidimensional/Chlorophyll concentration
+
+
+ 0 50
+ 5 52.5
+
+
+ image/png
+
+ WorldCRS84Quad
+
+
+
+ depth
+ depth
+
+ 0
+ false
+ 0.0
+ 10.0
+ 20.0
+
+
+ time
+ time
+ ISO8601
+ current
+ true
+ 2017-01-16T10:09:21.834255872
+ 2017-01-25T09:35:51.060063488
+ 2017-01-26T10:50:16.686192896
+ 2017-01-28T09:58:11.350386176
+ 2017-01-30T10:46:33.836892416
+
+
WorldCRS84Quad
CRS84 for the World
@@ -632,6 +671,17 @@
demo-1w.quality_flags_stdev
+
+ demo-multidimensional
+ Demonstration Multidimensional Cube
+
+
+ demo-multidimensional.conc_chl
+ demo-multidimensional/Chlorophyll concentration
+
+ demo-multidimensional.conc_chl
+
+
diff --git a/test/webapi/ows/wmts/res/WMTSCapabilities-OSM.xml b/test/webapi/ows/wmts/res/WMTSCapabilities-OSM.xml
index 7a1078f7c..83aaba0ab 100644
--- a/test/webapi/ows/wmts/res/WMTSCapabilities-OSM.xml
+++ b/test/webapi/ows/wmts/res/WMTSCapabilities-OSM.xml
@@ -493,6 +493,45 @@
2017-02-05T00:00:00.000000000
+
+ demo-multidimensional.conc_chl
+ demo-multidimensional/Chlorophyll concentration
+
+
+ 0 50
+ 5 52.5
+
+
+ image/png
+
+ WorldWebMercatorQuad
+
+
+
+ depth
+ depth
+
+ 0
+ false
+ 0.0
+ 10.0
+ 20.0
+
+
+ time
+ time
+ ISO8601
+ current
+ true
+ 2017-01-16T10:09:21.834255872
+ 2017-01-25T09:35:51.060063488
+ 2017-01-26T10:50:16.686192896
+ 2017-01-28T09:58:11.350386176
+ 2017-01-30T10:46:33.836892416
+
+
WorldWebMercatorQuad
Google Maps Compatible for the World
@@ -632,6 +671,17 @@
demo-1w.quality_flags_stdev
+
+ demo-multidimensional
+ Demonstration Multidimensional Cube
+
+
+ demo-multidimensional.conc_chl
+ demo-multidimensional/Chlorophyll concentration
+
+ demo-multidimensional.conc_chl
+
+
diff --git a/test/webapi/res/config-stats.yml b/test/webapi/res/config-stats.yml
index a4e8f1f9b..fa15d3936 100644
--- a/test/webapi/res/config-stats.yml
+++ b/test/webapi/res/config-stats.yml
@@ -44,6 +44,22 @@ Datasets:
Path: ../../../examples/serve/demo/sample-cog.tif
Style: tif_style
+ - Identifier: demo-multidimensional
+ Title: Demonstration Multidimensional Cube
+ GroupTitle: Multidimensional Demo
+ Tags: ["demo", "computed"]
+ FileSystem: memory
+ Path: script.py
+ Variables:
+ - "conc_chl"
+ Function: simulate_multidimensional_dataset
+ InputDatasets: ["demo"]
+ InputParameters:
+ variables: ["conc_chl"]
+ depths: [0.0, 10.0, 20.0]
+ factor: 0.2
+ Style: default
+
PlaceGroups:
- Identifier: inside-cube
Title: Points inside the cube
diff --git a/test/webapi/res/config.yml b/test/webapi/res/config.yml
index 7c1d84ed0..94e3269bf 100644
--- a/test/webapi/res/config.yml
+++ b/test/webapi/res/config.yml
@@ -37,6 +37,22 @@ Datasets:
incl_stdev: True
Style: default
+ - Identifier: demo-multidimensional
+ Title: Demonstration Multidimensional Cube
+ GroupTitle: Multidimensional Demo
+ Tags: ["demo", "computed"]
+ FileSystem: memory
+ Path: script.py
+ Variables:
+ - "conc_chl"
+ Function: simulate_multidimensional_dataset
+ InputDatasets: ["demo"]
+ InputParameters:
+ variables: ["conc_chl"]
+ depths: [0.0, 10.0, 20.0]
+ factor: 0.2
+ Style: default
+
PlaceGroups:
- Identifier: inside-cube
Title: Points inside the cube
diff --git a/test/webapi/res/script.py b/test/webapi/res/script.py
index 18eb1903d..23d162a95 100644
--- a/test/webapi/res/script.py
+++ b/test/webapi/res/script.py
@@ -2,6 +2,7 @@
# Permissions are hereby granted under the terms of the MIT License:
# https://opensource.org/licenses/MIT.
+import pandas as pd
import numpy as np
import xarray as xr
@@ -62,3 +63,31 @@ def broken_ml_dataset_factory_1():
def broken_ml_dataset_factory_2(ml_dataset: MultiLevelDataset):
"""Example for a custom, broken MultiLevelDataset class."""
return xr.Dataset()
+
+
+def simulate_multidimensional_dataset(ds, variables, depths, factor):
+ dim_name = "depth"
+
+ depth_coord = xr.DataArray(depths, dims=(dim_name,), coords={dim_name: depths})
+ depth_factors = xr.DataArray(
+ [factor**i for i in range(len(depths))],
+ dims=(dim_name,),
+ coords={dim_name: depths},
+ )
+
+ data_vars = {}
+
+ for name in variables:
+ data = ds[name]
+
+ if dim_name in data.dims:
+ data_vars[name] = data
+ continue
+
+ data_expanded = data.expand_dims({dim_name: depths})
+ data_scaled = data_expanded * depth_factors
+
+ data_scaled.attrs.update(data.attrs)
+ data_vars[name] = data_scaled
+
+ return xr.Dataset(data_vars, coords={dim_name: depth_coord})
diff --git a/test/webapi/statistics/test_controllers.py b/test/webapi/statistics/test_controllers.py
index 4fa68071f..258bd073d 100644
--- a/test/webapi/statistics/test_controllers.py
+++ b/test/webapi/statistics/test_controllers.py
@@ -28,7 +28,7 @@ def test_compute_statistics_for_point(self):
"demo",
"conc_tsm",
{"type": "Point", "coordinates": [lon, lat]},
- time,
+ {"time": time},
)
self.assertIsInstance(result, dict)
self.assertEqual(
@@ -48,7 +48,54 @@ def test_compute_statistics_for_point(self):
"demo",
"conc_tsm",
(lon, lat),
- time,
+ {"time": time},
+ )
+ self.assertIsInstance(result, dict)
+ self.assertEqual(
+ {"value": expected_value},
+ result,
+ )
+
+ lon = 1.262
+ lat = 50.243
+ time = "2017-01-16 10:09:21"
+ depth = 0
+
+ ctx = get_statistics_ctx()
+
+ dataset = ctx.datasets_ctx.get_dataset("demo-multidimensional")
+ expected_value = float(
+ dataset["conc_chl"]
+ .sel(lon=lon, lat=lat, time=time, depth=depth, method="nearest")
+ .values
+ )
+
+ result = compute_statistics(
+ ctx,
+ "demo-multidimensional",
+ "conc_chl",
+ {"type": "Point", "coordinates": [lon, lat]},
+ {"time": time, "depth": depth},
+ )
+ self.assertIsInstance(result, dict)
+ self.assertEqual(
+ {
+ "count": 1,
+ "minimum": expected_value,
+ "maximum": expected_value,
+ "mean": expected_value,
+ "deviation": 0.0,
+ },
+ result,
+ )
+
+ # Compact point mode
+ result = compute_statistics(
+ ctx,
+ "demo-multidimensional",
+ "conc_chl",
+ (lon, lat),
+ {"time": time, "depth": depth},
)
self.assertIsInstance(result, dict)
self.assertEqual(
@@ -68,7 +115,7 @@ def test_compute_statistics_for_oor_point(self):
"demo",
"conc_tsm",
{"type": "Point", "coordinates": [lon, lat]},
- time,
+ {"time": time},
)
self.assertIsInstance(result, dict)
self.assertEqual({"count": 0}, result)
@@ -79,7 +126,7 @@ def test_compute_statistics_for_oor_point(self):
"demo",
"conc_tsm",
(lon, lat),
- time,
+ {"time": time},
)
self.assertIsInstance(result, dict)
self.assertEqual({}, result)
@@ -108,7 +155,7 @@ def test_compute_statistics_for_polygon(self):
]
],
},
- time,
+ {"time": time},
)
self.assertIsInstance(result, dict)
self.assertEqual(380, result.get("count"))
@@ -147,7 +194,7 @@ def test_compute_statistics_for_polygon_and_var_assignment(self):
]
],
},
- time,
+ {"time": time},
)
self.assertIsInstance(result, dict)
self.assertEqual(380, result.get("count"))
@@ -186,7 +233,7 @@ def test_compute_statistics_for_oor_polygon(self):
]
],
},
- time,
+ {"time": time},
)
self.assertIsInstance(result, dict)
self.assertEqual({"count": 0}, result)
diff --git a/test/webapi/statistics/test_routes.py b/test/webapi/statistics/test_routes.py
index f2ffd90f7..3af715e7d 100644
--- a/test/webapi/statistics/test_routes.py
+++ b/test/webapi/statistics/test_routes.py
@@ -42,13 +42,28 @@ def test_fetch_post_statistics_ok(self):
assert round(parsed_data["result"]["mean"], 3) == 102.0
assert round(parsed_data["result"]["deviation"], 3) == 0.0
+ response = self.fetch(
+ "/statistics/demo-multidimensional/conc_chl?time=2017-01-30+10:46:34&depth=10",
+ method="POST",
+ body='{"type": "Point", "coordinates": [1.262, 50.243]}',
+ )
+
+ self.assertResponseOK(response)
+ decoded_data = response.data.decode("utf-8")
+ parsed_data = json.loads(decoded_data)
+ assert parsed_data["result"]["count"] == 1
+ assert round(parsed_data["result"]["minimum"], 3) == 1.835
+ assert round(parsed_data["result"]["maximum"], 3) == 1.835
+ assert round(parsed_data["result"]["mean"], 3) == 1.835
+ assert round(parsed_data["result"]["deviation"], 3) == 0.0
+
def test_fetch_post_statistics_missing_time_with_time_dimension_dataset(self):
response = self.fetch(
"/statistics/demo/conc_chl",
method="POST",
body='{"type": "Point", "coordinates": [1.768, 51.465]}',
)
- self.assertBadRequestResponse(response, "Missing query parameter 'time'")
+ self.assertResponseOK(response)
def test_fetch_post_statistics_missing_time_without_time_dimension_dataset(self):
response = self.fetch(
@@ -64,11 +79,7 @@ def test_fetch_post_statistics_with_time_without_time_dimension_dataset(self):
method="POST",
body='{"type": "Point", "coordinates": [-105.591, 35.751]}',
)
- self.assertBadRequestResponse(
- response,
- "Query parameter 'time' must not be given since "
- "dataset does not contain a 'time' dimension",
- )
+ self.assertResponseOK(response)
def test_fetch_post_statistics_invalid_geometry(self):
response = self.fetch(
@@ -84,6 +95,24 @@ def test_fetch_post_statistics_invalid_geometry(self):
)
self.assertBadRequestResponse(response, "Invalid GeoJSON geometry encountered")
+ def test_fetch_post_statistics_missing_non_spatial_dimensions_with_multidimensional_dataset(
+ self,
+ ):
+ response = self.fetch(
+ "/statistics/demo-multidimensional/conc_chl",
+ method="POST",
+ body='{"type": "Point", "coordinates": [1.262, 50.243]}',
+ )
+
+ self.assertResponseOK(response)
+ decoded_data = response.data.decode("utf-8")
+ parsed_data = json.loads(decoded_data)
+ assert parsed_data["result"]["count"] == 1
+ assert round(parsed_data["result"]["minimum"], 3) == 1.002
+ assert round(parsed_data["result"]["maximum"], 3) == 1.002
+ assert round(parsed_data["result"]["mean"], 3) == 1.002
+ assert round(parsed_data["result"]["deviation"], 3) == 0.0
+
def test_crs_conversion_post_statistics_with_coordinates_outside_bounds(self):
response = self.fetch(
"/statistics/cog_local/band_1",
@@ -114,7 +143,7 @@ def test_fetch_get_statistics_missing_time_with_time_dimension_dataset(self):
response = self.fetch(
"/statistics/demo/conc_chl?lon=1.786&lat=51.465", method="GET"
)
- self.assertBadRequestResponse(response, "Missing query parameter 'time'")
+ self.assertResponseOK(response)
def test_fetch_get_statistics_missing_time_without_time_dimension_dataset(self):
response = self.fetch(
@@ -129,11 +158,7 @@ def test_fetch_get_statistics_with_time_without_time_dimension_dataset(self):
"type=Point&time=2017-01-16+10:09:21",
method="GET",
)
- self.assertBadRequestResponse(
- response,
- "Query parameter 'time' must not be given since "
- "dataset does not contain a 'time' dimension",
- )
+ self.assertResponseOK(response)
def test_fetch_get_statistics(self):
response = self.fetch(
@@ -154,6 +179,27 @@ def test_fetch_get_statistics(self):
parsed_data = json.loads(decoded_data)
assert round(parsed_data["result"]["value"], 3) == 102.0
+ response = self.fetch(
+ "/statistics/demo-multidimensional/conc_chl?time=2017-01-16+10:09:21&lon=1.262&lat=50.243&type=Point&depth=0",
+ method="GET",
+ )
+ self.assertResponseOK(response)
+ decoded_data = response.data.decode("utf-8")
+ parsed_data = json.loads(decoded_data)
+ assert round(parsed_data["result"]["value"], 3) == 1.002
+
+ def test_fetch_get_statistics_missing_non_spatial_dimensions_with_multidimensional_dataset(
+ self,
+ ):
+ response = self.fetch(
+ "/statistics/demo-multidimensional/conc_chl?lon=1.262&lat=50.243&type=Point",
+ method="GET",
+ )
+ self.assertResponseOK(response)
+ decoded_data = response.data.decode("utf-8")
+ parsed_data = json.loads(decoded_data)
+ assert round(parsed_data["result"]["value"], 3) == 1.002
+
def test_crs_conversion_get_statistics_with_coordinates_outside_bounds(self):
response = self.fetch(
"/statistics/cog_local/band_1?lon=-125.810&lat=35.771&type=Point",
diff --git a/test/webapi/timeseries/test_controllers.py b/test/webapi/timeseries/test_controllers.py
index b78024843..989a04f37 100644
--- a/test/webapi/timeseries/test_controllers.py
+++ b/test/webapi/timeseries/test_controllers.py
@@ -46,6 +46,25 @@ def test_get_time_series_for_point(self):
]
self.assertAlmostEqualDeep(expected_result, actual_result)
+ actual_result = get_time_series(
+ ctx,
+ "demo-multidimensional",
+ "conc_chl",
+ dict(type="Point", coordinates=[2.1, 51.4]),
+ start_date=np.datetime64("2017-01-15"),
+ end_date=np.datetime64("2017-01-29"),
+ non_spatial_dimensions={"depth": 10},
+ )
+
+ expected_result = [
+ {"mean": 0.07436655163764953, "time": "2017-01-16T10:09:22Z"},
+ {"mean": None, "time": "2017-01-25T09:35:51Z"},
+ {"mean": None, "time": "2017-01-26T10:50:17Z"},
+ {"mean": 1.5146712303161622, "time": "2017-01-28T09:58:11Z"},
+ ]
+
+ self.assertAlmostEqualDeep(expected_result, actual_result)
+
def test_get_time_series_with_tolerance(self):
ctx = get_timeseries_ctx()
actual_result = get_time_series(
diff --git a/test/webapi/timeseries/test_routes.py b/test/webapi/timeseries/test_routes.py
index d92e18df3..8f3f6102a 100644
--- a/test/webapi/timeseries/test_routes.py
+++ b/test/webapi/timeseries/test_routes.py
@@ -117,3 +117,19 @@ def test_fetch_timeseries_tolerance(self):
"Query parameter 'tolerance' must have type 'float'."
")",
)
+
+ def test_fetch_timeseries_non_spatial_dimensions(self):
+ response = self.fetch(
+ "/timeseries/demo-multidimensional/conc_chl?depth=10",
+ method="POST",
+ body='{"type":"Point","coordinates":[1.262, 50.243]}',
+ )
+ self.assertResponseOK(response)
+
+ def test_fetch_timeseries_with_non_spatial_dimension_missing_in_request(self):
+ response = self.fetch(
+ "/timeseries/demo-multidimensional/conc_chl",
+ method="POST",
+ body='{"type":"Point","coordinates":[1.262, 50.243]}',
+ )
+ self.assertResponseOK(response)
diff --git a/xcube/core/tile.py b/xcube/core/tile.py
index 3db4a4263..0c352acba 100644
--- a/xcube/core/tile.py
+++ b/xcube/core/tile.py
@@ -6,7 +6,7 @@
import logging
import math
import warnings
-from collections.abc import Hashable, Sequence
+from collections.abc import Hashable, Iterable, Sequence
from typing import Any, Optional, Union
import matplotlib.colors
@@ -622,7 +622,7 @@ def _get_variable(
)
variable = dataset[var_name]
- non_spatial_labels = _get_non_spatial_labels(
+ non_spatial_labels = get_non_spatial_labels(
dataset, variable, non_spatial_labels, logger
)
if non_spatial_labels:
@@ -670,18 +670,25 @@ def get(self, tile_size: Pair[int], format: str) -> Union[bytes, np.ndarray]:
TransparentRgbaTilePool.INSTANCE = TransparentRgbaTilePool()
-def _get_non_spatial_labels(
+def get_non_spatial_labels(
dataset: xr.Dataset,
variable: xr.DataArray,
labels: Optional[dict[str, Any]],
- logger: logging.Logger,
+    logger: Optional[logging.Logger] = None,
+    excluded_dims: Optional[Iterable[str]] = None,
) -> dict[Hashable, Any]:
labels = labels if labels is not None else {}
+ excluded_dims = excluded_dims or []
new_labels = {}
# assuming last two dims are spatial: [..., y, x]
+ # and ignore specified dims to keep the log clean (see
+ # xcube.webapi.timeseries.routes.get_non_spatial_dimensions)
assert variable.ndim >= 2
- non_spatial_dims = variable.dims[0:-2]
+ non_spatial_dims = [
+ dim for dim in variable.dims[0:-2]
+ if dim not in excluded_dims
+ ]
if not non_spatial_dims:
# Ignore any extra labels passed.
return new_labels
@@ -697,6 +704,7 @@ def _get_non_spatial_labels(
dim_name = str(dim)
label = labels.get(dim_name)
+
if label is None:
if logger:
logger.debug(
diff --git a/xcube/webapi/statistics/controllers.py b/xcube/webapi/statistics/controllers.py
index 811fc1e1a..11bbd7d9b 100644
--- a/xcube/webapi/statistics/controllers.py
+++ b/xcube/webapi/statistics/controllers.py
@@ -32,13 +32,13 @@ def compute_statistics(
ds_id: str,
var_name: str,
geometry: Union[dict[str, Any], tuple[float, float]],
- time_label: str,
+    non_spatial_dimensions: dict[str, Any] | None = None,
trace_perf: bool = False,
):
measure_time = measure_time_cm(logger=LOG, disabled=not trace_perf)
with measure_time("Computing statistics"):
return _compute_statistics(
- ctx, ds_id, var_name, time_label, geometry, DEFAULT_BIN_COUNT
+ ctx, ds_id, var_name, geometry, DEFAULT_BIN_COUNT, non_spatial_dimensions
)
@@ -46,30 +46,17 @@ def _compute_statistics(
ctx: StatisticsContext,
ds_id: str,
var_name_or_assign: str,
- time_label: str,
geometry: Union[dict[str, Any], tuple[float, float]],
bin_count: int,
+    non_spatial_dimensions: dict[str, Any] | None = None,
):
ml_dataset = ctx.datasets_ctx.get_ml_dataset(ds_id)
dataset = ml_dataset.get_dataset(0)
grid_mapping = ml_dataset.grid_mapping
- dataset_contains_time = "time" in dataset
-
- if dataset_contains_time:
- if time_label is not None:
- try:
- time = np.array(time_label, dtype=dataset.time.dtype)
- dataset = dataset.sel(time=time, method="nearest")
- except (TypeError, ValueError) as e:
- raise ApiError.BadRequest("Invalid query parameter 'time'") from e
- else:
- raise ApiError.BadRequest("Missing query parameter 'time'")
- elif time_label is not None:
- raise ApiError.BadRequest(
- "Query parameter 'time' must not be given"
- " since dataset does not contain a 'time' dimension"
- )
+ if non_spatial_dimensions:
+ for dim_name, dim_value in non_spatial_dimensions.items():
+ dataset = dataset.sel({dim_name: dim_value}, method="nearest")
if isinstance(geometry, tuple):
compact_mode = True
diff --git a/xcube/webapi/statistics/routes.py b/xcube/webapi/statistics/routes.py
index 9208197d5..3dcc6d103 100644
--- a/xcube/webapi/statistics/routes.py
+++ b/xcube/webapi/statistics/routes.py
@@ -3,12 +3,19 @@
# https://opensource.org/licenses/MIT.
from xcube.server.api import ApiHandler
+from xcube.util.undefined import UNDEFINED
-from ...util.undefined import UNDEFINED
from ..datasets.routes import PATH_PARAM_DATASET_ID, PATH_PARAM_VAR_NAME
+
from .api import api
from .context import StatisticsContext
from .controllers import compute_statistics
+import logging
+from collections.abc import Hashable
+from typing import Any
+from xcube.core.tile import get_non_spatial_labels
+
+_logger = logging.getLogger(__name__)
QUERY_PARAM_X = {
"name": "lon",
@@ -54,7 +61,10 @@ class StatisticsHandler(ApiHandler[StatisticsContext]):
async def get(self, datasetId: str, varName: str):
lon = self.request.get_query_arg("lon", type=float, default=UNDEFINED)
lat = self.request.get_query_arg("lat", type=float, default=UNDEFINED)
- time = self.request.get_query_arg("time", type=str, default=None)
+ non_spatial_dimensions = get_non_spatial_dimensions(
+ self.ctx, self.request, datasetId, varName
+ )
+
trace_perf = self.request.get_query_arg(
"debug", default=self.ctx.datasets_ctx.trace_perf
)
@@ -65,7 +75,7 @@ async def get(self, datasetId: str, varName: str):
datasetId,
varName,
(lon, lat),
- time,
+ non_spatial_dimensions,
trace_perf,
)
await self.response.finish({"result": result})
@@ -89,7 +99,9 @@ async def get(self, datasetId: str, varName: str):
],
)
async def post(self, datasetId: str, varName: str):
- time = self.request.get_query_arg("time", type=str, default=None)
+ non_spatial_dimensions = get_non_spatial_dimensions(
+ self.ctx, self.request, datasetId, varName
+ )
trace_perf = self.request.get_query_arg(
"debug", default=self.ctx.datasets_ctx.trace_perf
)
@@ -100,7 +112,23 @@ async def post(self, datasetId: str, varName: str):
datasetId,
varName,
self.request.json,
- time,
+ non_spatial_dimensions,
trace_perf,
)
await self.response.finish({"result": result})
+
+
+def get_non_spatial_dimensions(ctx, request, ds_id, var) -> dict[Hashable, Any]:
+ ml_dataset = ctx.datasets_ctx.get_ml_dataset(ds_id)
+ ds = ml_dataset.get_dataset(0)
+ variable = ds[var]
+
+    variable_dims = variable.dims[:-2]  # last two dims are spatial (y, x)
+ dimensions = {}
+ for dim in variable_dims:
+ value = request.get_query_arg(str(dim), type=str, default=None)
+ if value is not None:
+ dimensions[str(dim)] = value
+
+ labels = get_non_spatial_labels(ds, variable, labels=dimensions, logger=_logger)
+ return labels
diff --git a/xcube/webapi/timeseries/controllers.py b/xcube/webapi/timeseries/controllers.py
index ef70e5f6c..a78a0d025 100644
--- a/xcube/webapi/timeseries/controllers.py
+++ b/xcube/webapi/timeseries/controllers.py
@@ -37,6 +37,7 @@ def get_time_series(
agg_methods: Union[str, Sequence[str]] = None,
start_date: Optional[np.datetime64] = None,
end_date: Optional[np.datetime64] = None,
+    non_spatial_dimensions: Optional[dict[str, Any]] = None,
tolerance: Optional[float] = 1.0,
max_valids: Optional[int] = None,
incl_ancillary_vars: bool = False,
@@ -70,6 +71,7 @@ def get_time_series(
cover a spatial area.
start_date: An optional start date.
end_date: An optional end date.
+ non_spatial_dimensions: Values of non-spatial dimensions (e.g. time, depth).
tolerance: Time tolerance in seconds that expands the given time
range. Defaults to one second.
max_valids: Optional number of valid points. If it is None
@@ -99,6 +101,11 @@ def get_time_series(
# Check if var_name is an expression
var_name=var_name if "=" not in var_name else None,
)
+
+ if non_spatial_dimensions:
+ for dim_name, dim_value in non_spatial_dimensions.items():
+ dataset = dataset.sel({dim_name: dim_value}, method="nearest")
+
geo_json_geometries, is_collection = _to_geo_json_geometries(geo_json)
geometries = _to_shapely_geometries(geo_json_geometries)
diff --git a/xcube/webapi/timeseries/routes.py b/xcube/webapi/timeseries/routes.py
index 1ea6db983..421cdc4eb 100644
--- a/xcube/webapi/timeseries/routes.py
+++ b/xcube/webapi/timeseries/routes.py
@@ -7,9 +7,16 @@
from xcube.server.api import ApiHandler
from ..datasets import PATH_PARAM_DATASET_ID, PATH_PARAM_VAR_NAME
+
from .api import api
from .context import TimeSeriesContext
from .controllers import get_time_series
+import logging
+from collections.abc import Hashable
+from typing import Any
+from xcube.core.tile import get_non_spatial_labels
+
+_logger = logging.getLogger(__name__)
# noinspection PyPep8Naming
@@ -77,6 +84,10 @@ async def post(self, datasetId: str, varName: str):
end_date = self.request.get_query_arg(
"endDate", type=pd.Timestamp, default=None
)
+ non_spatial_dimensions = get_non_spatial_dimensions(
+ self.ctx, self.request, datasetId, varName
+ )
+
tolerance = self.request.get_query_arg("tolerance", type=float, default=1.0)
max_valids = self.request.get_query_arg("maxValids", type=int, default=None)
result = await self.ctx.run_in_executor(
@@ -89,8 +100,25 @@ async def post(self, datasetId: str, varName: str):
agg_methods,
start_date,
end_date,
+ non_spatial_dimensions,
tolerance,
max_valids,
)
self.response.set_header("Content-Type", "application/json")
await self.response.finish(dict(result=result))
+
+
+def get_non_spatial_dimensions(ctx, request, ds_id, var) -> dict[Hashable, Any]:
+ ml_dataset = ctx.datasets_ctx.get_ml_dataset(ds_id)
+ ds = ml_dataset.get_dataset(0)
+ variable = ds[var]
+
+    variable_dims = variable.dims[:-2]  # last two dims are spatial (y, x)
+ dimensions = {}
+ for dim in variable_dims:
+ value = request.get_query_arg(str(dim), type=str, default=None)
+ if value is not None:
+ dimensions[str(dim)] = value
+
+ labels = get_non_spatial_labels(ds, variable, labels=dimensions, logger=_logger, excluded_dims=["time"])
+ return labels