Skip to content

Commit 6cebf65

Browse files
authored
Make datacube optional by moving imports to function level (#146)
* Move datacube imports to function level
* Fix missing import
* Make sure geometry remains a tuple
* Automatically update integration test validation results

---------

Co-authored-by: robbibt <robbibt@users.noreply.github.com>
1 parent 7021214 commit 6cebf65

File tree

7 files changed

+75
-12
lines changed

7 files changed

+75
-12
lines changed

intertidal/composites.py

Lines changed: 12 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -2,11 +2,9 @@
22
import sys
33

44
import click
5-
import datacube
65
import numpy as np
76
import odc.geo.xr
87
import xarray as xr
9-
from datacube.utils.aws import configure_s3_access
108
from dea_tools.dask import create_local_dask_cluster
119
from eo_tides.eo import pixel_tides
1210
from odc.algo import (
@@ -439,6 +437,18 @@ def tidal_composites_cli(
439437
aws_unsigned,
440438
overwrite,
441439
):
440+
# Attempt to import datacube and raise an error if not available
441+
try:
442+
import datacube
443+
from datacube.utils.aws import configure_s3_access
444+
except ImportError as e:
445+
msg = (
446+
"The DEA Tidal Composites CLI is configured for Australian applications, and "
447+
"requires `datacube`. Please install DEA Intertidal with the "
448+
"`[datacube]` extra, e.g.: `pip install dea-intertidal[datacube]`"
449+
)
450+
raise ImportError(msg) from e
451+
442452
# Create sample filename to test if data exists on file system
443453
filename = f"{output_dir}ga_s2_tidal_composites_cyear_3/{output_version.replace('.', '-')}/{study_area[:4]}/{study_area[4:]}/{label_date}--P1Y/ga_s2_tidal_composites_cyear_3_{study_area}_{label_date}--P1Y_final.stac-item.json"
444454

intertidal/elevation.py

Lines changed: 12 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -3,12 +3,10 @@
33
from itertools import repeat
44

55
import click
6-
import datacube
76
import matplotlib.pyplot as plt
87
import numpy as np
98
import seaborn as sns
109
import xarray as xr
11-
from datacube.utils.aws import configure_s3_access
1210
from dea_tools.dask import create_local_dask_cluster
1311
from eo_tides.eo import pixel_tides
1412
from odc.algo import xr_quantile
@@ -1125,6 +1123,18 @@ def intertidal_cli(
11251123
exposure_offsets,
11261124
aws_unsigned,
11271125
):
1126+
# Attempt to import datacube and raise an error if not available
1127+
try:
1128+
import datacube
1129+
from datacube.utils.aws import configure_s3_access
1130+
except ImportError as e:
1131+
msg = (
1132+
"The DEA Intertidal CLI is configured for Australian applications, and "
1133+
"requires `datacube`. Please install DEA Intertidal with the "
1134+
"`[datacube]` extra, e.g.: `pip install dea-intertidal[datacube]`"
1135+
)
1136+
raise ImportError(msg) from e
1137+
11281138
# Create a unique run ID for analysis based on input params and use
11291139
# for logs
11301140
input_params = locals()

intertidal/exposure.py

Lines changed: 3 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -4,7 +4,6 @@
44
import numpy as np
55
import pandas as pd
66
import pytz
7-
import sunriset
87
import xarray as xr
98
from eo_tides.eo import _pixel_tides_resample, pixel_tides
109

@@ -38,6 +37,9 @@ def temporal_filters(x, time_range, dem):
3837
timesteps.
3938
4039
"""
40+
# Import sunriset here to avoid dependency issues if function not used
41+
import sunriset
42+
4143
if x == "dry":
4244
return time_range.drop(
4345
time_range[

intertidal/io.py

Lines changed: 46 additions & 6 deletions
Original file line number | Diff line number | Diff line change
@@ -11,8 +11,6 @@
1111
import numpy as np
1212
import odc.geo.xr
1313
import xarray as xr
14-
from datacube.utils.geometry import Geometry as Geometry_datacube18
15-
from datacube.utils.masking import mask_invalid_data
1614
from dea_tools.coastal import glint_angle
1715
from eo_tides.stats import tide_stats
1816
from eodatasets3 import DatasetAssembler, serialise
@@ -116,8 +114,14 @@ def extract_geobox(
116114
the CRS, resolution, shape and extent of the study area).
117115
118116
"""
119-
# List of valid input geometry types (from `odc-geo` or `datacube-core`)
120-
GEOM_TYPES = (odc.geo.geom.Geometry, Geometry_datacube18)
117+
# List of valid input geometry types (from `odc-geo` or `datacube`).
118+
# If `datacube` is not installed, only support `odc-geo` geometries
119+
try:
120+
from datacube.utils.geometry import Geometry as Geometry_datacube18
121+
122+
geom_types = (odc.geo.geom.Geometry, Geometry_datacube18)
123+
except ImportError:
124+
geom_types = (odc.geo.geom.Geometry,)
121125

122126
# Either `study_area` or `geom` must be provided
123127
if study_area is None and geom is None:
@@ -126,11 +130,11 @@ def extract_geobox(
126130
)
127131

128132
# If custom geom is provided, verify it is a geometry
129-
if geom is not None and not isinstance(geom, GEOM_TYPES):
133+
if geom is not None and not isinstance(geom, geom_types):
130134
raise ValueError("Unsupported input type for `geom`; please provide a datacube Geometry object.")
131135

132136
# Otherwise, extract GeoBox from geometry
133-
if geom is not None and isinstance(geom, GEOM_TYPES):
137+
if geom is not None and isinstance(geom, geom_types):
134138
geobox = GeoBox.from_geopolygon(geom, crs=crs, resolution=resolution)
135139

136140
# If no custom geom provided, load tile from GridSpec tile grid
@@ -253,6 +257,18 @@ def load_data(
253257
to generate ODC lineage metadata for DEA Intertidal)
254258
255259
"""
260+
# Attempt to import datacube and raise an error if not available
261+
try:
262+
from datacube.utils.masking import mask_invalid_data
263+
except ImportError as e:
264+
msg = (
265+
"The `load_data` function requires `datacube` to be installed. "
266+
"Please consider loading data with `odc-stac` instead, or install "
267+
"DEA Intertidal with the `[datacube]` extra, e.g.: `pip install "
268+
"dea-intertidal[datacube]`"
269+
)
270+
raise ImportError(msg) from e
271+
256272
# Set spectral bands to load
257273
s2_spectral_bands = [
258274
"nbart_blue",
@@ -516,6 +532,18 @@ def load_topobathy_mask(
516532
following analysis.
517533
518534
"""
535+
# Attempt to import datacube and raise an error if not available
536+
try:
537+
from datacube.utils.masking import mask_invalid_data
538+
except ImportError as e:
539+
msg = (
540+
"The `load_topobathy_mask` function requires `datacube` to be installed. "
541+
"Please consider loading data with `odc-stac` instead, or install "
542+
"DEA Intertidal with the `[datacube]` extra, e.g.: `pip install "
543+
"dea-intertidal[datacube]`"
544+
)
545+
raise ImportError(msg) from e
546+
519547
# Load from datacube, reprojecting to GeoBox of input satellite data
520548
topobathy_ds = dc.load(product=product, like=geobox, resampling=resampling).squeeze("time")
521549

@@ -572,6 +600,18 @@ def load_aclum_mask(
572600
False equals all other classes.
573601
574602
"""
603+
# Attempt to import datacube and raise an error if not available
604+
try:
605+
from datacube.utils.masking import mask_invalid_data
606+
except ImportError as e:
607+
msg = (
608+
"The `load_aclum_mask` function requires `datacube` to be installed. "
609+
"Please consider loading data with `odc-stac` instead, or install "
610+
"DEA Intertidal with the `[datacube]` extra, e.g.: `pip install "
611+
"dea-intertidal[datacube]`"
612+
)
613+
raise ImportError(msg) from e
614+
575615
try:
576616
# Load from datacube, reprojecting to GeoBox of input satellite data
577617
aclum_ds = dc.load(product=product, like=geobox, resampling=resampling).squeeze("time")

tests/README.md

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -10,7 +10,7 @@ Integration tests
1010
1111
This directory contains tests that are run to verify that DEA Intertidal code runs correctly. The ``test_intertidal.py`` file runs a small-scale full workflow analysis over an intertidal flat in the Gulf of Carpentaria using the DEA Intertidal [Command Line Interface (CLI) tools](../notebooks/Intertidal_CLI.ipynb), and compares these results against a LiDAR validation DEM to produce some simple accuracy metrics.
1212

13-
The latest integration test completed at **2025-09-24 16:17**. Compared to the previous run, it had an:
13+
The latest integration test completed at **2025-09-24 17:19**. Compared to the previous run, it had an:
1414
- RMSE accuracy of **0.14 m ( :heavy_minus_sign: no change)**
1515
- MAE accuracy of **0.12 m ( :heavy_minus_sign: no change)**
1616
- Bias of **0.12 m ( :heavy_minus_sign: no change)**

tests/validation.csv

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -131,3 +131,4 @@ time,Correlation,RMSE,MAE,R-squared,Bias,Regression slope
131131
2025-09-24 05:21:37.998155+00:00,0.975,0.145,0.122,0.95,0.116,1.121
132132
2025-09-24 06:00:07.113646+00:00,0.975,0.145,0.122,0.95,0.116,1.121
133133
2025-09-24 06:17:15.044540+00:00,0.975,0.145,0.122,0.95,0.116,1.121
134+
2025-09-24 07:19:12.704574+00:00,0.975,0.145,0.122,0.95,0.116,1.121

tests/validation.jpg

41 Bytes
Loading

0 commit comments

Comments (0)