Hey,
yes, it is possible, but not with aggregate_temporal_period yet. Unfortunately, aggregate_temporal_period doesn’t support the yearly period yet, see also the corresponding issue: aggregate_temporal_period: No support for a period of type: year · Issue #145 · Open-EO/openeo-geopyspark-driver · GitHub
You can fall back to aggregate_temporal though. Here’s an example in R that uses pre-computed NDVI values:
# Yearly mean NDVI via aggregate_temporal (aggregate_temporal_period does not
# support a "year" period yet, see openeo-geopyspark-driver issue #145).
# Assumes an authenticated openeo connection is already established.
p <- processes()

# Load pre-computed NDVI values for the area and time range of interest.
datacube1 <- p$load_collection(
  id = "CGLS_NDVI_V3_GLOBAL",
  spatial_extent = list(
    east = 7.763041605582233,
    north = 52.10387582302508,
    south = 51.82970111402692,
    west = 7.498213761663553
  ),
  temporal_extent = list("2016-01-01T00:00:00Z", "2022-01-01T00:00:00Z")
)

# The NDVI collection has a single band, so the bands dimension can be dropped.
datacube2 <- p$drop_dimension(data = datacube1, name = "bands")

# Reducer applied to each temporal interval: mean of all observations.
reducer1 <- function(data) {
  p$mean(data = data)
}

# Build one half-open [Jan 1, next Jan 1) interval per year instead of
# hard-coding the full list — adding years only requires changing this vector.
years <- 2016:2021
intervals <- lapply(years, function(y) {
  list(sprintf("%d-01-01T00:00:00Z", y), sprintf("%d-01-01T00:00:00Z", y + 1))
})

# Aggregate to one value per calendar year, labelled with the year itself.
datacube3 <- p$aggregate_temporal(
  data = datacube2,
  intervals = intervals,
  labels = as.list(years),
  reducer = reducer1
)

result <- p$save_result(data = datacube3, format = "GTIFF")
or, expressed as an openEO process graph:
{
"process_graph": {
"1": {
"arguments": {
"bands": null,
"id": "CGLS_NDVI_V3_GLOBAL",
"spatial_extent": {
"east": 7.763041605582233,
"north": 52.10387582302508,
"south": 51.82970111402692,
"west": 7.498213761663553
},
"temporal_extent": [
"2016-01-01T00:00:00Z",
"2022-01-01T00:00:00Z"
]
},
"process_id": "load_collection"
},
"2": {
"arguments": {
"data": {
"from_node": "1"
},
"name": "bands"
},
"process_id": "drop_dimension"
},
"4": {
"arguments": {
"data": {
"from_node": "5"
},
"format": "GTIFF"
},
"process_id": "save_result",
"result": true
},
"5": {
"arguments": {
"data": {
"from_node": "2"
},
"dimension": null,
"intervals": [
[
"2016-01-01T00:00:00Z",
"2017-01-01T00:00:00Z"
],
[
"2017-01-01T00:00:00Z",
"2018-01-01T00:00:00Z"
],
[
"2018-01-01T00:00:00Z",
"2019-01-01T00:00:00Z"
],
[
"2019-01-01T00:00:00Z",
"2020-01-01T00:00:00Z"
],
[
"2020-01-01T00:00:00Z",
"2021-01-01T00:00:00Z"
],
[
"2021-01-01T00:00:00Z",
"2022-01-01T00:00:00Z"
]
],
"labels": [
2016,
2017,
2018,
2019,
2020,
2021
],
"reducer": {
"process_graph": {
"1": {
"arguments": {
"data": {
"from_parameter": "data"
}
},
"process_id": "mean",
"result": true
}
}
}
},
"process_id": "aggregate_temporal"
}
}
}
Hope it helps. Let me know if there are additional questions.