Skip to content

Commit e291d96

Browse files
authored
Update README.md: Use reducer process
1 parent 48afa62 commit e291d96

File tree

1 file changed

+16
-30
lines changed

1 file changed

+16
-30
lines changed

README.md

Lines changed: 16 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -27,42 +27,28 @@ rqatrend(y, 0.5, 10, 1)
2727
Use in openEO:
2828

2929
```python
30+
# Import required packages
3031
import openeo
31-
connection = openeo.connect("https://openeo.cloud")
32+
from openeo.processes import process
3233

33-
cube_in = connection.load_collection(
34-
"SENTINEL1_SIG0_20M",
35-
spatial_extent={"west": 16.06, "south": 48.06, "east": 16.67, "north": 48.07},
36-
temporal_extent=["2023-01-01", "2024-01-01"],
37-
bands=["VV"]
38-
)
39-
40-
rqatrend = openeo.UDF(
41-
"""
42-
# /// script
43-
# dependencies = [
44-
# "xarray",
45-
# "rqadeforestation @ git+https://github.com/EarthyScience/RQADeforestation.py",
46-
# ]
47-
# ///
48-
49-
import xarray as xr
50-
from rqadeforestation import rqatrend
34+
# Connect to the back-end
35+
connection = openeo.connect("https://openeo.eodc.eu/openeo/1.2.0/")
36+
connection.authenticate_oidc()
5137

52-
def apply_datacube(cube: xr.DataArray, context: dict) -> xr.DataArray:
53-
res_np = rqatrend(cube.to_numpy(), 0.5, 10, 1)
54-
res_xr = xr.DataArray(res_np)
55-
return res_xr
56-
"""
38+
bbox = {"west": 11.655947222212369, "east": 11.715643117926051, "south": 50.87929082462556, "north": 50.92129080534822}
39+
datacube1 = connection.load_collection(collection_id = "SENTINEL1_SIG0_20M", spatial_extent = bbox,
40+
temporal_extent = ["2020-01-01T00:00:00Z", "2021-01-01T00:00:00Z"], bands = None, properties = {}
5741
)
5842

59-
cube_out = cube_in.apply(process=rqatrend)
60-
result = cube_out.save_result("GTiff")
43+
def reducer1(data, context):
44+
rqadeforestation1 = process("rqadeforestation", data = data, threshold = 0.4)
45+
return rqadeforestation1
6146

62-
connection.authenticate_oidc()
63-
job = result.create_job()
64-
job.start_and_wait()
65-
job.get_results().download_files("output")
47+
reduce3 = datacube1.reduce_dimension(reducer = reducer1, dimension = "t")
48+
save4 = reduce3.save_result(format = "NETCDF")
49+
50+
# The process can be executed synchronously (see below), as batch job or as web service now
51+
result = connection.execute(save4)
6652
```
6753

6854
## Motivation

0 commit comments

Comments (0)