Bug in Morton encoding query #2162

@erikvansebille

Parcels version

v4-dev

Description

The code below raises the IndexError shown in the traceback underneath it. @fluidnumerics-joe, do you have any idea what's going on?

Code sample

import xarray as xr
import numpy as np
import parcels
from glob import glob
DATA_ROOT = "/Users/erik/Desktop/Parcelsv4_test/MOi"

# Build the U/V/W daily file lists and point to the ORCA horizontal mesh file
fileU = f"{DATA_ROOT}/GLO12/psy4v3r1-daily_U_2010-01-0[1-3].nc"
filenames = {"U": glob(fileU), "V": glob(fileU.replace("_U_", "_V_")), "W": glob(fileU.replace("_U_", "_W_"))}
mesh_mask = f"{DATA_ROOT}/domain_ORCA0083-N006/PSY4V3R1_mesh_hgr.nc"

# Keyword arguments for xr.open_mfdataset: concatenate along time_counter and chunk lazily
fileargs = {
    "concat_dim": "time_counter",
    "combine": "nested",
    "data_vars": "minimal",
    "coords": "minimal",
    "compat": "override",
    "chunks": {"time_counter": 1, "depth": 2, "y": 64, "x": 64},
}

# Lazily open the velocity files, the W files (for the depthw coordinate) and the mesh file,
# keeping only the variables needed downstream
ds_u = xr.open_mfdataset(filenames["U"], **fileargs)[["vozocrtx"]].drop_vars(["nav_lon", "nav_lat"])
ds_v = xr.open_mfdataset(filenames["V"], **fileargs)[["vomecrty"]].drop_vars(["nav_lon", "nav_lat"])
ds_depth = xr.open_mfdataset(filenames["W"], **fileargs)[["depthw"]]
ds_mesh = xr.open_dataset(mesh_mask)[["glamf", "gphif"]].isel(t=0)

# Merge into a single dataset and rename variables and dimensions to the names Parcels expects
ds = (
    xr.merge([ds_u, ds_v, ds_depth, ds_mesh], compat="identical")
    .rename({"vozocrtx": "U", "vomecrty": "V"})
    .rename({"glamf": "lon", "gphif": "lat", "time_counter": "time", "depthw": "depth"})
)

# xgcm axis-to-dimension mapping for the curvilinear C-grid
coords = {
    "X": {"left": "x"},
    "Y": {"left": "y"},
    "T": {"center": "time"},
}

# Keep only the first (surface) vertical level
ds = ds.isel(depth=0, deptht=0)
print(ds)

# Build the xgcm grid and wrap it in a Parcels XGrid
xgcm_grid = parcels.xgcm.Grid(ds, coords=coords, periodic=False)
grid = parcels.xgrid.XGrid(xgcm_grid)

# Velocity fields with linear interpolation and geographic unit conversion
U = parcels.Field("U", ds["U"], grid, interp_method=parcels.XLinear)
V = parcels.Field("V", ds["V"], grid, interp_method=parcels.XLinear)
U.units = parcels.GeographicPolar()
V.units = parcels.Geographic()
UV = parcels.VectorField("UV", U, V)

fieldset = parcels.FieldSet([U, V, UV])

# Release a single particle and advect it with explicit Euler for two days
lon = -70
lat = -70

pset = parcels.ParticleSet(fieldset=fieldset, lon=lon, lat=lat)

runtime = np.timedelta64(2, "D")
dt = np.timedelta64(15, "m")

pset.execute(parcels.AdvectionEE, runtime=runtime, dt=dt, verbose_progress=False)

Error traceback

Traceback (most recent call last):
  File "/Users/erik/Library/CloudStorage/OneDrive-UniversiteitUtrecht/ParcelsSnippets/test_mortonhashing_bug.py", line 55, in <module>
    pset.execute(parcels.AdvectionEE, runtime=runtime, dt=dt, verbose_progress=False)
  File "/Users/erik/Codes/ParcelsCode/parcels/particleset.py", line 583, in execute
    self._kernel.execute(self, endtime=next_time, dt=dt)
  File "/Users/erik/Codes/ParcelsCode/parcels/kernel.py", line 250, in execute
    f(pset[evaluate_particles], self._fieldset, None)
  File "/Users/erik/Codes/ParcelsCode/parcels/application_kernels/advection.py", line 103, in AdvectionEE
    (u1, v1) = fieldset.UV[particle]
               ~~~~~~~~~~~^^^^^^^^^^
  File "/Users/erik/Codes/ParcelsCode/parcels/field.py", line 340, in __getitem__
    return self.eval(key.time, key.depth, key.lat, key.lon, key)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/erik/Codes/ParcelsCode/parcels/field.py", line 315, in eval
    position = self.grid.search(z, y, x, ei=_ei)
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/erik/Codes/ParcelsCode/parcels/xgrid.py", line 298, in search
    yi, eta, xi, xsi = _search_indices_curvilinear_2d(self, y, x, yi, xi)
                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/erik/Codes/ParcelsCode/parcels/_index_search.py", line 258, in _search_indices_curvilinear_2d
    yi, xi = grid.get_spatial_hash().query(y, x)
             ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/Users/erik/Codes/ParcelsCode/parcels/spatialhash.py", line 258, in query
    j_best[has_hits] = j[src_best[has_hits]]
                       ~^^^^^^^^^^^^^^^^^^^^
IndexError: index 9223372036854775807 is out of bounds for axis 0 with size 13213618
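
For what it's worth, 9223372036854775807 is np.iinfo(np.int64).max, which looks like a fill/sentinel value rather than a real index. One possible reading of the traceback (I haven't checked spatialhash.py to confirm) is that query() fills src_best with that sentinel for query points that never get a best candidate, and that the has_hits mask flags one of those points anyway, so the sentinel reaches the fancy indexing j[src_best[has_hits]]. The numpy sketch below only illustrates that failure mode; the array names mirror the traceback, but the sizes and values are made up.

import numpy as np

SENTINEL = np.iinfo(np.int64).max                  # 9223372036854775807, the index in the error message

j = np.arange(10)                                  # stand-in for the hashed j-indices (size 13213618 in the report)
src_best = np.full(3, SENTINEL, dtype=np.int64)    # fill value meaning "no best candidate found"
src_best[0] = 4                                    # only the first query point actually got a candidate

# If the mask is consistent with src_best, the indexing is safe:
has_hits = src_best != SENTINEL
j_best = np.full(3, SENTINEL, dtype=np.int64)
j_best[has_hits] = j[src_best[has_hits]]           # fine: j_best[0] becomes 4

# But if has_hits flags a point whose src_best was never filled in,
# the sentinel reaches the fancy indexing and raises the reported error:
has_hits = np.array([True, True, False])
try:
    j_best[has_hits] = j[src_best[has_hits]]
except IndexError as err:
    print(err)                                     # index 9223372036854775807 is out of bounds for axis 0 with size 10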
