Working with data in HydroMT#
[1]:
import os
import sys
import matplotlib.pyplot as plt
To obtain raster data, we can use a HydroMT DataCatalog. There are several pre-defined data catalogs:
artifact_data : Piave basin in Northern Italy (example data)
deltares_data : global datasets hosted on the p-drive
opendap_data (in progress …) : global tiled datasets hosted on the opendap server
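A minimal sketch of inspecting a catalog before reading any data (assuming the public DataCatalog.sources attribute; the source names shown in the comment are examples):
import hydromt
# Load the pre-defined artifact_data catalog and list the names of its data sources
catalog = hydromt.DataCatalog(data_libs=["artifact_data"])
print(list(catalog.sources))  # e.g. "merit_hydro", "gebco", ...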
[2]:
import hydromt
data_catalog = hydromt.DataCatalog(data_libs=["artifact_data"])
ds = data_catalog.get_rasterdataset("merit_hydro")
# merit hydro has multiple variables; select one
ds["elevtn"]
[2]:
<xarray.DataArray 'elevtn' (y: 1920, x: 1680)> Size: 13MB
dask.array<getitem, shape=(1920, 1680), dtype=float32, chunksize=(1920, 1680), chunktype=numpy.ndarray>
Coordinates:
* x (x) float64 13kB 11.6 11.6 11.6 11.6 ... 13.0 13.0 13.0 13.0
* y (y) float64 15kB 46.8 46.8 46.8 46.8 ... 45.2 45.2 45.2 45.2
spatial_ref int64 8B 0
Attributes:
AREA_OR_POINT: Area
_FillValue: -9999.0
source_file: elevtn.tif
Often we don’t need the entire extent of certain datasets, but only part of it#
[3]:
# each model can be initialized with a data_catalog
# this works similarly to the data_catalog above, but is now attached to the model instance
from hydromt_sfincs import SfincsModel
# Initialize SfincsModel with the deltares_data catalog (global datasets hosted on the Deltares p-drive)
sf = SfincsModel(data_libs=["deltares_data"], root="tmp_example")
inp_dict = {
"x0": 268650,
"y0": 5018550,
"dx": 200.0,
"dy": 200.0,
"nmax": 272,
"mmax": 425,
"rotation": 0,
"epsg": 32633,
}
# create grid
sf.setup_grid(**inp_dict)
Downloading file 'v0.7.0/data_catalog.yml' from 'https://raw.githubusercontent.com/Deltares/hydromt/main/data/catalogs/deltares_data/v0.7.0/data_catalog.yml' to '/home/runner/.hydromt_data/deltares_data'.
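With the grid defined, a quick sanity check of the model extent can be helpful; a small sketch using sf.region (a GeoDataFrame of the model bounding box, also used in the next cell):
# Inspect the CRS and bounding box of the model region we just created
print(sf.region.crs)
print(sf.region.total_bounds)  # [xmin, ymin, xmax, ymax] in the model CRS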
[4]:
# since we already know where our model is, we minimize the amount of data that is read in by specifying the region:
da_dep1 = sf.data_catalog.get_rasterdataset(
"merit_hydro", variables=["elevtn"], geom=sf.region, buffer=5
)
da_dep2 = sf.data_catalog.get_rasterdataset(
"gebco", variables=["elevtn"], geom=sf.region, buffer=5
)
FileNotFoundError: No such file found: /home/runner/work/hydromt_sfincs/hydromt_sfincs/docs/_examples/p:/wflow_global/hydromt/topography/merit_hydro/*.vrt
Intermezzo: We can also download the data to a local folder#
[5]:
# List of data sources to export
source_list = ["fabdem", "gebco"]
# Geographic extent
bbox = sf.region.to_crs(4326).total_bounds
folder_name = "tmp_data_export"
sf.data_catalog.export_data(
data_root=folder_name,
bbox=bbox,
source_names=source_list,
meta={"version": "1"},
)
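export_data also writes a data_catalog.yml into the export folder describing the clipped datasets; a hedged sketch of re-loading that exported catalog (file name assumed from the export above):
# Load the exported catalog and list the sources it now contains
exported_yml = os.path.join(folder_name, "data_catalog.yml")
local_catalog = hydromt.DataCatalog(data_libs=[exported_yml])
print(list(local_catalog.sources))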
Local data can be added to the model as well#
[6]:
# Please specify the local geotiff you want to use:
localtiff = r"./tmp_data_export/fabdem.tif"
[7]:
# The first option that exists is opening raster data with, for example, xarray:
import xarray as xr
with xr.open_dataset(localtiff, engine="rasterio") as ds:
print(ds)
RasterioIOError: ./tmp_data_export/fabdem.tif: No such file or directory
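As an alternative sketch (assuming the exported GeoTIFF exists locally), the same file could also be opened directly with rioxarray:
import rioxarray
# Open the GeoTIFF as a single DataArray, masking nodata values
da = rioxarray.open_rasterio(localtiff, masked=True).squeeze("band", drop=True)
print(da.rio.crs, da.shape)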
[8]:
# The second and more elegant option is to use the data_catalog functionalities
# This also adds the data to the data_catalog, so you can use it later in your workflow without having to specify the path again
# It also allows you to keep track of which data is actually used in your model (for reproducibility)
# and it has additional options to read only part of the data, e.g. using bbox, geom, buffer or zoom_level
ds = sf.data_catalog.get_rasterdataset(
localtiff,
variables=["elevtn"],
geom=sf.region,
meta={"version": "1"},
)
# added to data_catalog
sf.data_catalog["fabdem"]
FileNotFoundError: No such file found: ./tmp_data_export/fabdem.tif
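Because the file is read through the catalog, it is also registered as a source (here under the name fabdem, derived from the file name), so it can be referenced by name afterwards; a quick hypothetical check:
# The local file is now a named source in the model's data catalog
print("fabdem" in sf.data_catalog.sources)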
For higher-resolution datasets, sometimes making xyz-tiles is beneficial for speed#
[9]:
fabdem = sf.data_catalog.get_rasterdataset("fabdem")
name = "fabdem_xyz"
root = os.path.join(folder_name, name)
fabdem.raster.to_xyz_tiles(
root=root,
tile_size=256,
zoom_levels=[0, 1, 2, 3],
driver="GTiff",
compress="deflate",
)
FileNotFoundError: No such file found: /home/runner/work/hydromt_sfincs/hydromt_sfincs/docs/_examples/p:/wflow_global/hydromt/topography/fabdem/fabdem.vrt
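to_xyz_tiles also writes a small data catalog yaml ({name}.yml) into the tile root; that file is what the next cell passes to data_libs. A hypothetical check that it was created:
# The yaml written next to the tiles, used as a data_libs entry in the next cell
yml_path = os.path.join(root, f"{name}.yml")
print(yml_path, os.path.exists(yml_path))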
Now build a model that uses:#
local dataset : Local download of gebco
data catalog xyz-tiles : Local xyz tiles of fabdem
Delft Dashboard data (in progress) : global tiled datasets hosted on the opendap server
[10]:
sf = SfincsModel(
data_libs=["artifact_data", os.path.join(folder_name, name, f"{name}.yml")],
root="tmp_example",
)
inp_dict = {
"x0": 268650,
"y0": 5018550,
"dx": 200.0,
"dy": 200.0,
"nmax": 272,
"mmax": 425,
"rotation": 0,
"epsg": 32633,
}
# create grid
sf.setup_grid(**inp_dict)
NameError: name 'name' is not defined
[11]:
da_dep1 = sf.data_catalog.get_rasterdataset(
"merit_hydro", variables=["elevtn"], geom=sf.region, buffer=5
)
da_dep2 = sf.data_catalog.get_rasterdataset(
os.path.join(folder_name, "gebco.tif"),
variables=["elevtn"],
)
# Make sure that you also load your local data_catalog if you want to use it in your model
# In this example, we only imported the fabdem_xyz.yml file, but this could be easily merged into one file to have a local data_catalog containing more datasets
da_dep3 = sf.data_catalog.get_rasterdataset(
"fabdem_xyz", variables=["elevtn"], zoom_level=(sf.config["dx"], "meter")
)
FileNotFoundError: No such file found: /home/runner/work/hydromt_sfincs/hydromt_sfincs/docs/_examples/p:/wflow_global/hydromt/topography/merit_hydro/*.vrt
[12]:
# Now create the datasets_dep list. The order determines the priority of the datasets. Each dataset is a dictionary with the dataset (da) and its merge arguments
datasets_dep = [
{"da": da_dep1, "zmin": 0.001},
{"da": da_dep2, "offset": 0},
{"da": da_dep3, "zmin": 0.001, "merge_method": "last"},
]
NameError: name 'da_dep1' is not defined
[13]:
dep = sf.setup_dep(datasets_dep=datasets_dep)
NameError: name 'datasets_dep' is not defined
[14]:
sf.grid["dep"].plot.imshow(vmin=-10, vmax=10, cmap="terrain")
KeyError: "No variable named 'dep'. Variables on the dataset include []"
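Once the dep layer is part of the model grid, you could also use the model's own plotting helper for an overview figure; a sketch assuming the plot_basemap method of SfincsModel (as shown in other hydromt_sfincs examples):
# Overview plot of the model with the dep variable (assumes plot_basemap is available)
fig, ax = sf.plot_basemap(variable="dep")
plt.show()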