Cube generation issue with new netcdf4 library
An issue was discovered when reading cube data that was generated with a newer version of the netcdf4 library, using the cablab cube access API:
```
---------------------------------------------------------------------------
OSError Traceback (most recent call last)
<ipython-input-10-735772665256> in <module>()
----> 1 ESDC = cube.data.dataset()
2 ESDC
C:\Miniconda3\envs\esdc\lib\site-packages\cablab_core-0.2.2-py3.5.egg\cablab\cube_access.py in dataset(self, key)
145 for i in indices:
146 key = self._cube_var_list[i]
--> 147 dataset = self._get_or_open_dataset(key)
148 # data_arrays[key.name] = dataset.variables[key.name]
149 data_arrays = xr.merge([data_arrays, dataset])
C:\Miniconda3\envs\esdc\lib\site-packages\cablab_core-0.2.2-py3.5.egg\cablab\cube_access.py in _get_or_open_dataset(self, cube_var)
300 def _get_or_open_dataset(self, cube_var):
301 if not cube_var.dataset:
--> 302 self._open_dataset(cube_var)
303 return cube_var.dataset
304
C:\Miniconda3\envs\esdc\lib\site-packages\cablab_core-0.2.2-py3.5.egg\cablab\cube_access.py in _open_dataset(self, variable)
308 concat_dim='time',
309 preprocess=self._preprocess_dataset,
--> 310 engine='h5netcdf')
311
312 def _preprocess_dataset(self, ds: Dataset):
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\api.py in open_mfdataset(paths, chunks, concat_dim, compat, preprocess, engine, lock, **kwargs)
503 lock = _default_lock(paths[0], engine)
504 datasets = [open_dataset(p, engine=engine, chunks=chunks or {}, lock=lock,
--> 505 **kwargs) for p in paths]
506 file_objs = [ds._file_obj for ds in datasets]
507
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\api.py in <listcomp>(.0)
503 lock = _default_lock(paths[0], engine)
504 datasets = [open_dataset(p, engine=engine, chunks=chunks or {}, lock=lock,
--> 505 **kwargs) for p in paths]
506 file_objs = [ds._file_obj for ds in datasets]
507
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\api.py in open_dataset(filename_or_obj, group, decode_cf, mask_and_scale, decode_times, autoclose, concat_characters, decode_coords, engine, chunks, lock, cache, drop_variables)
299 lock = _default_lock(filename_or_obj, engine)
300 with close_on_error(store):
--> 301 return maybe_decode_store(store, lock)
302 else:
303 if engine is not None and engine != 'scipy':
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\api.py in maybe_decode_store(store, lock)
223 store, mask_and_scale=mask_and_scale, decode_times=decode_times,
224 concat_characters=concat_characters, decode_coords=decode_coords,
--> 225 drop_variables=drop_variables)
226
227 _protect_dataset_variables_inplace(ds, cache)
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\conventions.py in decode_cf(obj, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables)
944 encoding = obj.encoding
945 elif isinstance(obj, AbstractDataStore):
--> 946 vars, attrs = obj.load()
947 extra_coords = set()
948 file_obj = obj
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\common.py in load(self)
120 """
121 variables = FrozenOrderedDict((_decode_variable_name(k), v)
--> 122 for k, v in self.get_variables().items())
123 attributes = FrozenOrderedDict(self.get_attrs())
124 return variables, attributes
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\h5netcdf_.py in get_variables(self)
96 with self.ensure_open(autoclose=False):
97 return FrozenOrderedDict((k, self.open_store_variable(k, v))
---> 98 for k, v in iteritems(self.ds.variables))
99
100 def get_attrs(self):
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\core\utils.py in FrozenOrderedDict(*args, **kwargs)
304
305 def FrozenOrderedDict(*args, **kwargs):
--> 306 return Frozen(OrderedDict(*args, **kwargs))
307
308
python3/cyordereddict/_cyordereddict.pyx in cyordereddict._cyordereddict.OrderedDict.__init__ (python3\cyordereddict\_cyordereddict.c:2295)()
C:\Miniconda3\envs\esdc\lib\_collections_abc.py in update(*args, **kwds)
776 self[key] = other[key]
777 else:
--> 778 for key, value in other:
779 self[key] = value
780 for key, value in kwds.items():
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\h5netcdf_.py in <genexpr>(.0)
96 with self.ensure_open(autoclose=False):
97 return FrozenOrderedDict((k, self.open_store_variable(k, v))
---> 98 for k, v in iteritems(self.ds.variables))
99
100 def get_attrs(self):
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\h5netcdf_.py in open_store_variable(self, name, var)
79 data = indexing.LazilyIndexedArray(
80 H5NetCDFArrayWrapper(name, self))
---> 81 attrs = _read_attributes(var)
82
83 # netCDF4 specific encoding
C:\Miniconda3\envs\esdc\lib\site-packages\xarray\backends\h5netcdf_.py in _read_attributes(h5netcdf_var)
34 attrs = OrderedDict()
35 for k in h5netcdf_var.ncattrs():
---> 36 v = h5netcdf_var.getncattr(k)
37 if k not in ['_FillValue', 'missing_value']:
38 v = maybe_decode_bytes(v)
C:\Miniconda3\envs\esdc\lib\site-packages\h5netcdf\legacyapi.py in getncattr(self, name)
6
7 def getncattr(self, name):
----> 8 return self.attrs[name]
9
10 def setncattr(self, name, value):
C:\Miniconda3\envs\esdc\lib\site-packages\h5netcdf\attrs.py in __getitem__(self, key)
14 if key in _hidden_attrs:
15 raise KeyError(key)
---> 16 return self._h5attrs[key]
17
18 def __setitem__(self, key, value):
h5py\_objects.pyx in h5py._objects.with_phil.wrapper()
h5py\_objects.pyx in h5py._objects.with_phil.wrapper()
C:\Miniconda3\envs\esdc\lib\site-packages\h5py\_hl\attrs.py in __getitem__(self, name)
79
80 arr = numpy.ndarray(shape, dtype=dtype, order='C')
---> 81 attr.read(arr, mtype=htype)
82
83 if len(arr.shape) == 0:
h5py\_objects.pyx in h5py._objects.with_phil.wrapper()
h5py\_objects.pyx in h5py._objects.with_phil.wrapper()
h5py\h5a.pyx in h5py.h5a.AttrID.read()
h5py\_proxy.pyx in h5py._proxy.attr_rw()
OSError: Unable to read attribute (no appropriate function for conversion path)
```
The error above occurred when executing:

```python
from esdl import Cube
cube = Cube.open("/path/to/cube")
cube.data.dataset()
```
According to this issue and this issue, the error is caused by a global attribute whose type, as written by the newer netcdf4 library, is not interpreted correctly by the h5py reader.
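To locate the offending attribute, one can iterate over a file's attributes with h5py directly and see which one fails to read. A minimal diagnostic sketch; the file path is hypothetical and should point at one of the cube's NetCDF files:

```python
# Minimal sketch: find attributes that h5py cannot read.
# The path below is hypothetical; use one of the cube's .nc files.
import h5py

with h5py.File("/path/to/cube/data/variable/file.nc", "r") as f:
    for name in f.attrs:
        try:
            f.attrs[name]  # raises OSError for unreadable attribute types
        except OSError as err:
            print("unreadable attribute:", name, "-", err)
```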
Workaround: use netcdf4 1.2.2 to generate the cube.
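Since incompatible cubes are easy to produce by accident, one option is to check the installed version at the start of the cube-generation script. A minimal sketch, assuming the netCDF4 Python package is used for writing:

```python
# Minimal sketch: assert the pinned netcdf4 version before generating
# a cube, so files with unreadable attributes are not written by accident.
import netCDF4

if netCDF4.__version__ != "1.2.2":
    raise RuntimeError(
        "netcdf4 1.2.2 is required for cube generation, found %s"
        % netCDF4.__version__
    )
```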