Hi all!
I want to stack-filter: e.g. use openeo.processes.product as a reducer over DataCubes that hold (I think) boolean values, since I built the masks with comparison operators such as openeo.processes.gt.
Currently, reducing with product is not supported (that is what the error message tells me). Is there another way to do this, or is this a type error on my side? (One idea I had is sketched after the code below.)
Error:
OpenEoApiError: [500] Internal: Failed to process synchronously on backend vito: OpenEoApiError("[501] FeatureUnsupported: Reducer 'product' not supported")
Code:
import pathlib
from typing import Dict, List, Union

import pandas as pd

from openeo import connect, Connection
from openeo.rest.datacube import DataCube
# Note: sum here is the openEO process wrapper and shadows the builtin
from openeo.processes import lt, product, quantiles, sum
# vito_url = "https://openeo.vito.be/openeo/1.0"  # not used; I go through the aggregator, which routes to VITO
con: Connection = connect("openeo.cloud")
con.authenticate_oidc(provider_id="egi")
out_dir = pathlib.Path("output")
out_dir.mkdir(parents=True, exist_ok=True)
denia_harbour_bbox: Dict[str, Union[float, str]] = {
    "west": 0.10594089795383788, "east": 0.12937267590793944,
    "south": 38.83464299556706, "north": 38.85035302841166,
    "crs": "EPSG:4326",
}
temporal_extent: List[str] = ["2021-01-01", "2021-04-01"]
collection = ("TERRASCOPE_S2_TOC_V2", ["B06", "B05", "B03"])
band_names: List[str] = ["swir1", "nir", "green"]
dc: DataCube = (
    con.load_collection(
        collection_id=collection[0],
        spatial_extent=denia_harbour_bbox,
        temporal_extent=temporal_extent,
        bands=collection[1],
    )
    .add_dimension(name="source_name", label=collection[0], type="other")
    .rename_labels(dimension="bands", source=collection[1], target=band_names)
)
# Build 2-month intervals covering the temporal extent
dr: pd.DatetimeIndex = pd.date_range(start=temporal_extent[0], end=temporal_extent[1], freq="2MS")
t_intervals: List[List[str]] = [[str(d), str(dr[i + 1])] for i, d in enumerate(dr[:-1])]
percentile: float = 0.5  # e.g. the median; adjust as needed
# Create bucketed DC based on a percentile of the images per interval
t_bucketed_dc: DataCube = dc.aggregate_temporal(
    intervals=t_intervals,
    reducer=lambda data: quantiles(data, probabilities=[percentile]),
    labels=[t_int[0] for t_int in t_intervals],
)
# Cube of ones per observation; summing per interval counts the available images
count_dc: DataCube = dc.band("green") \
    .multiply(0).add(1) \
    .aggregate_temporal(
        intervals=t_intervals,
        reducer=lambda data: sum(data),
        labels=[t_int[0] for t_int in t_intervals],
    )
mask: DataCube = count_dc.apply(lambda val: lt(x=val, y=3))  # True where fewer than 3 images in the interval
# Try to reduce over time to filter for missing images at any timestep
mask_no_t = mask.reduce_dimension(dimension="t", reducer=product)
dc = t_bucketed_dc.mask(mask_no_t)
dc.download(out_dir / "test2.nc", format="netcdf")
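One idea I had, in case this is not just a type error on my side: the mask holds only boolean 0/1 values, so the product over "t" should equal the minimum over "t" (it is 1 only if every timestep is 1). A minimal sketch of that workaround, assuming the backend accepts "min" as a predefined reducer (I have not verified this on VITO):
# For a boolean mask: product over "t" == min over "t" == logical AND of all timesteps
mask_no_t = mask.reduce_dimension(dimension="t", reducer="min")
dc = t_bucketed_dc.mask(mask_no_t)
Would that be equivalent, or am I missing something about how product handles booleans?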