Tip

For an interactive online version, click here: Binder badge

Working with data in HydroMT#

[1]:
import os
import sys
import matplotlib.pyplot as plt

To obtain raster data, we can use a HydroMT DataCatalog. There are several pre-defined data catalogs:

  • artifact_data : Piave basin in Northern Italy (example data)

  • deltares_data : global datasets hosted on the p-drive

  • opendap_data (in progress …) : global tiled datasets hosted on the opendap server
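As a quick orientation, the sketch below instantiates the artifact_data catalog and prints a few of its source names; it assumes the sources property, which maps source names to their adapters.

import hydromt

# instantiate the pre-defined artifact_data catalog and print a few of its source names
catalog = hydromt.DataCatalog(data_libs=["artifact_data"])
print(list(catalog.sources.keys())[:10])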

[2]:
import hydromt

data_catalog = hydromt.DataCatalog(data_libs=["artifact_data"])
ds = data_catalog.get_rasterdataset("merit_hydro")

# merit hydro has multiple variables; select one
ds["elevtn"]
[2]:
<xarray.DataArray 'elevtn' (y: 1920, x: 1680)> Size: 13MB
dask.array<getitem, shape=(1920, 1680), dtype=float32, chunksize=(1920, 1680), chunktype=numpy.ndarray>
Coordinates:
  * x            (x) float64 13kB 11.6 11.6 11.6 11.6 ... 13.0 13.0 13.0 13.0
  * y            (y) float64 15kB 46.8 46.8 46.8 46.8 ... 45.2 45.2 45.2 45.2
    spatial_ref  int64 8B 0
Attributes:
    AREA_OR_POINT:  Area
    _FillValue:     -9999.0
    source_file:    elevtn.tif
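Before clipping or merging it is often useful to check the grid properties of the returned data; a short sketch using HydroMT's raster accessor:

# inspect the spatial properties of the elevation layer via HydroMT's raster accessor
print(ds["elevtn"].raster.crs)     # coordinate reference system
print(ds["elevtn"].raster.res)     # (xres, yres) resolution in CRS units
print(ds["elevtn"].raster.bounds)  # (xmin, ymin, xmax, ymax)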

Often we don’t need the entire extent of a dataset, but only part of it#

[3]:
# each model can be initialized with a data_catalog
# this works similarly to the data_catalog above, but now we use the deltares_data catalog

from hydromt_sfincs import SfincsModel

# Initialize SfincsModel with the deltares_data catalog, which contains global datasets hosted on the Deltares p-drive
sf = SfincsModel(data_libs=["deltares_data"], root="tmp_example")
inp_dict = {
    "x0": 268650,
    "y0": 5018550,
    "dx": 200.0,
    "dy": 200.0,
    "nmax": 272,
    "mmax": 425,
    "rotation": 0,
    "epsg": 32633,
}
# create grid
sf.setup_grid(**inp_dict)
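After setup_grid, the model domain is available as sf.region (a GeoDataFrame in the model CRS), which is convenient to inspect before requesting data for it; for example:

# the model region is a GeoDataFrame in the model CRS (EPSG:32633 in this example)
print(sf.region.crs)
print(sf.region.total_bounds)  # (xmin, ymin, xmax, ymax)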
[4]:
# since we already know where our model is, we minimize the amount of data that is read in by specifying the region:

da_dep1 = sf.data_catalog.get_rasterdataset(
    "merit_hydro", variables=["elevtn"], geom=sf.region, buffer=5
)

da_dep2 = sf.data_catalog.get_rasterdataset(
    "gebco", variables=["elevtn"], geom=sf.region, buffer=5
)
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
Cell In[4], line 3
      1 # since we already know where our model is, we minimize the amount of data that is read-in by specifying the region:
----> 3 da_dep1 = sf.data_catalog.get_rasterdataset(
      4     "merit_hydro", variables=["elevtn"], geom=sf.region, buffer=5
      5 )
      7 da_dep2 = sf.data_catalog.get_rasterdataset(
      8     "gebco", variables=["elevtn"], geom=sf.region, buffer=5
      9 )

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_catalog.py:1393, in DataCatalog.get_rasterdataset(self, data_like, bbox, geom, zoom_level, buffer, handle_nodata, align, variables, time_tuple, single_var_as_array, provider, version, **kwargs)
   1390 else:
   1391     raise ValueError(f'Unknown raster data type "{type(data_like).__name__}"')
-> 1393 obj = source.get_data(
   1394     bbox=bbox,
   1395     geom=geom,
   1396     buffer=buffer,
   1397     zoom_level=zoom_level,
   1398     align=align,
   1399     variables=variables,
   1400     time_tuple=time_tuple,
   1401     single_var_as_array=single_var_as_array,
   1402     cache_root=self._cache_dir if self.cache else None,
   1403     handle_nodata=handle_nodata,
   1404     logger=self.logger,
   1405 )
   1406 return obj

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:304, in RasterDatasetAdapter.get_data(self, bbox, geom, buffer, zoom_level, align, variables, time_tuple, handle_nodata, single_var_as_array, cache_root, logger)
    297 """Return a clipped, sliced and unified RasterDataset.
    298
    299 For a detailed description see:
    300 :py:func:`~hydromt.data_catalog.DataCatalog.get_rasterdataset`
    301 """
    302 try:
    303     # load data
--> 304     fns = self._resolve_paths(
    305         time_tuple, variables, zoom_level, geom, bbox, logger
    306     )
    307     self.mark_as_used()  # mark used
    308     ds = self._read_data(
    309         fns,
    310         geom,
   (...)
    314         logger=logger,
    315     )

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:363, in RasterDatasetAdapter._resolve_paths(self, time_tuple, variables, zoom_level, geom, bbox, logger)
    360     zoom_level = self._parse_zoom_level(zoom_level, geom, bbox, logger=logger)
    362 # resolve path based on time, zoom level and/or variables
--> 363 fns = super()._resolve_paths(
    364     time_tuple=time_tuple,
    365     variables=variables,
    366     zoom_level=zoom_level,
    367 )
    368 return fns

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/data_adapter.py:407, in DataAdapter._resolve_paths(self, time_tuple, variables, zoom_level)
    404         fns_out.append(UPath(fn, **self.storage_options))
    406 if len(fns_out) == 0:
--> 407     raise FileNotFoundError(f"No such file found: {path}{postfix}")
    409 return fns_out

FileNotFoundError: No such file found: /home/runner/work/hydromt_sfincs/hydromt_sfincs/docs/_examples/p:/wflow_global/hydromt/topography/merit_hydro/*.vrt
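Besides a geom, get_rasterdataset also accepts a bbox in WGS84 coordinates; the minimal sketch below reuses the model extent as a bounding box with the artifact_data catalog from above (illustrative only):

# clip by an (xmin, ymin, xmax, ymax) bounding box in EPSG:4326 instead of a geometry
bbox = sf.region.to_crs(4326).total_bounds
da_clip = data_catalog.get_rasterdataset(
    "merit_hydro", variables=["elevtn"], bbox=bbox, buffer=5
)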

Intermezzo: We can also download the data to a local folder#

[5]:
# List of data sources to export
source_list = ["fabdem", "gebco"]
# Geographic extent
bbox = sf.region.to_crs(4326).total_bounds

folder_name = "tmp_data_export"
sf.data_catalog.export_data(
    data_root=folder_name,
    bbox=bbox,
    source_names=source_list,
    meta={"version": "1"},
)
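Besides the clipped data files, export_data also writes a catalog yaml into the export folder (assumed here to be named data_catalog.yml), which can later be loaded as a data library so the local copies are used:

# load the exported local copies through the catalog file written by export_data
# (assumed to be named data_catalog.yml inside the export folder)
local_catalog = hydromt.DataCatalog(
    data_libs=[os.path.join(folder_name, "data_catalog.yml")]
)
print(list(local_catalog.sources.keys()))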

Local data can be added to the model as well#

[6]:
# Please specify the local GeoTIFF you want to use:
localtiff = r"./tmp_data_export/fabdem.tif"
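A quick existence check on this path can catch a missing or misspelled file before opening it:

# quick sanity check that the exported GeoTIFF actually exists at this path
print(os.path.exists(localtiff))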
[7]:
# The first option is to open the raster data directly with, for example, xarray:
import xarray as xr

ds_xarray = xr.open_dataset(localtiff, engine="rasterio")
ds_xarray
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/backends/file_manager.py:211, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
    210 try:
--> 211     file = self._cache[self._key]
    212 except KeyError:

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/backends/lru_cache.py:56, in LRUCache.__getitem__(self, key)
     55 with self._lock:
---> 56     value = self._cache[key]
     57     self._cache.move_to_end(key)

KeyError: [<function open at 0x7f30abc6efc0>, ('./tmp_data_export/fabdem.tif',), 'r', (('sharing', False),), '409a31d1-13f4-4b20-8005-bce7ff28f35d']

During handling of the above exception, another exception occurred:

CPLE_OpenFailedError                      Traceback (most recent call last)
File rasterio/_base.pyx:310, in rasterio._base.DatasetBase.__init__()

File rasterio/_base.pyx:221, in rasterio._base.open_dataset()

File rasterio/_err.pyx:221, in rasterio._err.exc_wrap_pointer()

CPLE_OpenFailedError: ./tmp_data_export/fabdem.tif: No such file or directory

During handling of the above exception, another exception occurred:

RasterioIOError                           Traceback (most recent call last)
Cell In[7], line 4
      1 # The first option that exist is openning raster data with for example xarray:
      2 import xarray as xr
----> 4 ds_xarray = xr.open_dataset(localtiff, engine="rasterio")
      5 ds_xarray

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/backends/api.py:573, in open_dataset(filename_or_obj, engine, chunks, cache, decode_cf, mask_and_scale, decode_times, decode_timedelta, use_cftime, concat_characters, decode_coords, drop_variables, inline_array, chunked_array_type, from_array_kwargs, backend_kwargs, **kwargs)
    561 decoders = _resolve_decoders_kwargs(
    562     decode_cf,
    563     open_backend_dataset_parameters=backend.open_dataset_parameters,
   (...)
    569     decode_coords=decode_coords,
    570 )
    572 overwrite_encoded_chunks = kwargs.pop("overwrite_encoded_chunks", None)
--> 573 backend_ds = backend.open_dataset(
    574     filename_or_obj,
    575     drop_variables=drop_variables,
    576     **decoders,
    577     **kwargs,
    578 )
    579 ds = _dataset_from_backend_dataset(
    580     backend_ds,
    581     filename_or_obj,
   (...)
    591     **kwargs,
    592 )
    593 return ds

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/rioxarray/xarray_plugin.py:58, in RasterioBackend.open_dataset(self, filename_or_obj, drop_variables, parse_coordinates, lock, masked, mask_and_scale, variable, group, default_name, decode_coords, decode_times, decode_timedelta, band_as_variable, open_kwargs)
     56 if open_kwargs is None:
     57     open_kwargs = {}
---> 58 rds = _io.open_rasterio(
     59     filename_or_obj,
     60     parse_coordinates=parse_coordinates,
     61     cache=False,
     62     lock=lock,
     63     masked=masked,
     64     mask_and_scale=mask_and_scale,
     65     variable=variable,
     66     group=group,
     67     default_name=default_name,
     68     decode_times=decode_times,
     69     decode_timedelta=decode_timedelta,
     70     band_as_variable=band_as_variable,
     71     **open_kwargs,
     72 )
     73 if isinstance(rds, xarray.DataArray):
     74     dataset = rds.to_dataset()

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/rioxarray/_io.py:1124, in open_rasterio(filename, parse_coordinates, chunks, cache, lock, masked, mask_and_scale, variable, group, default_name, decode_times, decode_timedelta, band_as_variable, **open_kwargs)
   1122     else:
   1123         manager = URIManager(file_opener, filename, mode="r", kwargs=open_kwargs)
-> 1124     riods = manager.acquire()
   1125     captured_warnings = rio_warnings.copy()
   1127 # raise the NotGeoreferencedWarning if applicable

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/backends/file_manager.py:193, in CachingFileManager.acquire(self, needs_lock)
    178 def acquire(self, needs_lock=True):
    179     """Acquire a file object from the manager.
    180
    181     A new file is only opened if it has expired from the
   (...)
    191         An open file object, as returned by ``opener(*args, **kwargs)``.
    192     """
--> 193     file, _ = self._acquire_with_cache_info(needs_lock)
    194     return file

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/backends/file_manager.py:217, in CachingFileManager._acquire_with_cache_info(self, needs_lock)
    215     kwargs = kwargs.copy()
    216     kwargs["mode"] = self._mode
--> 217 file = self._opener(*self._args, **kwargs)
    218 if self._mode == "w":
    219     # ensure file doesn't get overridden when opened again
    220     self._mode = "a"

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/rasterio/env.py:451, in ensure_env_with_credentials.<locals>.wrapper(*args, **kwds)
    448     session = DummySession()
    450 with env_ctor(session=session):
--> 451     return f(*args, **kwds)

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/rasterio/__init__.py:304, in open(fp, mode, driver, width, height, count, crs, transform, dtype, nodata, sharing, **kwargs)
    301 path = _parse_path(raw_dataset_path)
    303 if mode == "r":
--> 304     dataset = DatasetReader(path, driver=driver, sharing=sharing, **kwargs)
    305 elif mode == "r+":
    306     dataset = get_writer_for_path(path, driver=driver)(
    307         path, mode, driver=driver, sharing=sharing, **kwargs
    308     )

File rasterio/_base.pyx:312, in rasterio._base.DatasetBase.__init__()

RasterioIOError: ./tmp_data_export/fabdem.tif: No such file or directory
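An equivalent way to open a single GeoTIFF is to use rioxarray directly (the engine="rasterio" backend above is provided by rioxarray); a minimal sketch:

import rioxarray

# open the same GeoTIFF directly with rioxarray; masked=True converts nodata to NaN
da_rio = rioxarray.open_rasterio(localtiff, masked=True)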
[8]:
# The second and more elegant option is to use the data_catalog functionalities
# This also adds the data to the data_catalog, so you can use it later in your workflow without having to specify the path again
# It also allows you to keep track of which data is actually used in your model (for reproducibility)
# and provides additional options to read only part of the data using bbox, geom, zoom_level, etc.

ds = sf.data_catalog.get_rasterdataset(
    localtiff,
    variables=["elevtn"],
    geom=sf.region,
    meta={"version": "1"},
)

# added to data_catalog
sf.data_catalog["fabdem"]
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
Cell In[8], line 6
      1 # The second and more elegant option is to use the data_catalog functionalities
      2 # This also adds the data to the data_catalog, so you can use it later on in your workflow without having to specify the path again
      3 # This also allows to keep track which data is actually used in your model (for reproducibility)
      4 # and it has additional options to get the data for partly using bbox, region, zoom_level etc.
----> 6 ds = sf.data_catalog.get_rasterdataset(
      7     localtiff,
      8     variables=["elevtn"],
      9     geom=sf.region,
     10     meta={"version": "1"},
     11 )
     13 # added to data_catalog
     14 sf.data_catalog["fabdem"]

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_catalog.py:1393, in DataCatalog.get_rasterdataset(self, data_like, bbox, geom, zoom_level, buffer, handle_nodata, align, variables, time_tuple, single_var_as_array, provider, version, **kwargs)
   1390 else:
   1391     raise ValueError(f'Unknown raster data type "{type(data_like).__name__}"')
-> 1393 obj = source.get_data(
   1394     bbox=bbox,
   1395     geom=geom,
   1396     buffer=buffer,
   1397     zoom_level=zoom_level,
   1398     align=align,
   1399     variables=variables,
   1400     time_tuple=time_tuple,
   1401     single_var_as_array=single_var_as_array,
   1402     cache_root=self._cache_dir if self.cache else None,
   1403     handle_nodata=handle_nodata,
   1404     logger=self.logger,
   1405 )
   1406 return obj

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:304, in RasterDatasetAdapter.get_data(self, bbox, geom, buffer, zoom_level, align, variables, time_tuple, handle_nodata, single_var_as_array, cache_root, logger)
    297 """Return a clipped, sliced and unified RasterDataset.
    298
    299 For a detailed description see:
    300 :py:func:`~hydromt.data_catalog.DataCatalog.get_rasterdataset`
    301 """
    302 try:
    303     # load data
--> 304     fns = self._resolve_paths(
    305         time_tuple, variables, zoom_level, geom, bbox, logger
    306     )
    307     self.mark_as_used()  # mark used
    308     ds = self._read_data(
    309         fns,
    310         geom,
   (...)
    314         logger=logger,
    315     )

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:363, in RasterDatasetAdapter._resolve_paths(self, time_tuple, variables, zoom_level, geom, bbox, logger)
    360     zoom_level = self._parse_zoom_level(zoom_level, geom, bbox, logger=logger)
    362 # resolve path based on time, zoom level and/or variables
--> 363 fns = super()._resolve_paths(
    364     time_tuple=time_tuple,
    365     variables=variables,
    366     zoom_level=zoom_level,
    367 )
    368 return fns

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/data_adapter.py:407, in DataAdapter._resolve_paths(self, time_tuple, variables, zoom_level)
    404         fns_out.append(UPath(fn, **self.storage_options))
    406 if len(fns_out) == 0:
--> 407     raise FileNotFoundError(f"No such file found: {path}{postfix}")
    409 return fns_out

FileNotFoundError: No such file found: ./tmp_data_export/fabdem.tif
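Alternatively, a local file can be registered in the model's data catalog under a name of your choosing, so later calls can refer to that name. The sketch below assumes DataCatalog.from_dict, which takes a mapping in the same format as a data catalog yaml entry (check the API of your HydroMT version):

# register the local GeoTIFF under its own name in the model's data catalog
# (entry keys follow the data catalog yaml format: path, data_type, driver)
sf.data_catalog.from_dict(
    {
        "fabdem_local": {
            "path": localtiff,
            "data_type": "RasterDataset",
            "driver": "raster",
        }
    }
)
da_local = sf.data_catalog.get_rasterdataset("fabdem_local", geom=sf.region, buffer=5)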

For higher-resolution datasets, creating xyz tiles is sometimes beneficial for speed#

[9]:
fabdem = sf.data_catalog.get_rasterdataset("fabdem")

name = f"fabdem_xyz"
root = os.path.join(folder_name, name)

fabdem.raster.to_xyz_tiles(
    root=root,
    tile_size=256,
    zoom_levels=[0, 1, 2, 3],
    driver="GTiff",
    compress="deflate",
)
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
Cell In[9], line 1
----> 1 fabdem = sf.data_catalog.get_rasterdataset("fabdem")
      3 name = f"fabdem_xyz"
      4 root = os.path.join(folder_name, name)

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_catalog.py:1393, in DataCatalog.get_rasterdataset(self, data_like, bbox, geom, zoom_level, buffer, handle_nodata, align, variables, time_tuple, single_var_as_array, provider, version, **kwargs)
   1390 else:
   1391     raise ValueError(f'Unknown raster data type "{type(data_like).__name__}"')
-> 1393 obj = source.get_data(
   1394     bbox=bbox,
   1395     geom=geom,
   1396     buffer=buffer,
   1397     zoom_level=zoom_level,
   1398     align=align,
   1399     variables=variables,
   1400     time_tuple=time_tuple,
   1401     single_var_as_array=single_var_as_array,
   1402     cache_root=self._cache_dir if self.cache else None,
   1403     handle_nodata=handle_nodata,
   1404     logger=self.logger,
   1405 )
   1406 return obj

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:304, in RasterDatasetAdapter.get_data(self, bbox, geom, buffer, zoom_level, align, variables, time_tuple, handle_nodata, single_var_as_array, cache_root, logger)
    297 """Return a clipped, sliced and unified RasterDataset.
    298
    299 For a detailed description see:
    300 :py:func:`~hydromt.data_catalog.DataCatalog.get_rasterdataset`
    301 """
    302 try:
    303     # load data
--> 304     fns = self._resolve_paths(
    305         time_tuple, variables, zoom_level, geom, bbox, logger
    306     )
    307     self.mark_as_used()  # mark used
    308     ds = self._read_data(
    309         fns,
    310         geom,
   (...)
    314         logger=logger,
    315     )

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:363, in RasterDatasetAdapter._resolve_paths(self, time_tuple, variables, zoom_level, geom, bbox, logger)
    360     zoom_level = self._parse_zoom_level(zoom_level, geom, bbox, logger=logger)
    362 # resolve path based on time, zoom level and/or variables
--> 363 fns = super()._resolve_paths(
    364     time_tuple=time_tuple,
    365     variables=variables,
    366     zoom_level=zoom_level,
    367 )
    368 return fns

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/data_adapter.py:407, in DataAdapter._resolve_paths(self, time_tuple, variables, zoom_level)
    404         fns_out.append(UPath(fn, **self.storage_options))
    406 if len(fns_out) == 0:
--> 407     raise FileNotFoundError(f"No such file found: {path}{postfix}")
    409 return fns_out

FileNotFoundError: No such file found: /home/runner/work/hydromt_sfincs/hydromt_sfincs/docs/_examples/p:/wflow_global/hydromt/topography/fabdem/fabdem.vrt
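As a rule of thumb (assuming, as in to_xyz_tiles, that zoom level 0 keeps the native resolution and every next level coarsens the data by a factor of two), the cell size per zoom level can be estimated as follows:

# approximate cell size of each xyz zoom level, assuming level 0 is the native
# resolution and every next level coarsens the data by a factor of 2
base_res = abs(fabdem.raster.res[0])
for zoom in [0, 1, 2, 3]:
    print(f"zoom level {zoom}: ~{base_res * 2 ** zoom} (CRS units)")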

Now build a model that uses:#

  • local dataset : Local download of gebco

  • data catalog xyz-tiles : Local xyz tiles of fabdem

  • Delft Dashboard data (in progress): global tiled datasets hosted on the opendap server

[10]:
sf = SfincsModel(
    data_libs=["artifact_data", os.path.join(folder_name, name, f"{name}.yml")],
    root="tmp_example",
)
inp_dict = {
    "x0": 268650,
    "y0": 5018550,
    "dx": 200.0,
    "dy": 200.0,
    "nmax": 272,
    "mmax": 425,
    "rotation": 0,
    "epsg": 32633,
}
# create grid
sf.setup_grid(**inp_dict)
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
Cell In[10], line 2
      1 sf = SfincsModel(
----> 2     data_libs=["artifact_data", os.path.join(folder_name, name, f"{name}.yml")],
      3     root="tmp_example",
      4 )
      5 inp_dict = {
      6     "x0": 268650,
      7     "y0": 5018550,
   (...)
     13     "epsg": 32633,
     14 }
     15 # create grid

NameError: name 'name' is not defined
[11]:
da_dep1 = sf.data_catalog.get_rasterdataset(
    "merit_hydro", variables=["elevtn"], geom=sf.region, buffer=5
)

da_dep2 = sf.data_catalog.get_rasterdataset(
    os.path.join(folder_name, "gebco.tif"),
    variables=["elevtn"],
)

# Make sure that you also load your local data_catalog if you want to use it in your model
# In this example we only imported the fabdem_xyz.yml file, but this could easily be merged into a single local data catalog containing more datasets
da_dep3 = sf.data_catalog.get_rasterdataset(
    "fabdem_xyz", variables=["elevtn"], zoom_level=(sf.config["dx"], "meter")
)
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
Cell In[11], line 1
----> 1 da_dep1 = sf.data_catalog.get_rasterdataset(
      2     "merit_hydro", variables=["elevtn"], geom=sf.region, buffer=5
      3 )
      5 da_dep2 = sf.data_catalog.get_rasterdataset(
      6     os.path.join(folder_name, "gebco.tif"),
      7     variables=["elevtn"],
      8 )
     10 # Make sure that you also load your local data_catalog if you want to use it in your model
     11 # In this example, we only imported the fabdem_xyz.yml file, but this could be easily merged into one file to have a local data_catalog containing more datasets

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_catalog.py:1393, in DataCatalog.get_rasterdataset(self, data_like, bbox, geom, zoom_level, buffer, handle_nodata, align, variables, time_tuple, single_var_as_array, provider, version, **kwargs)
   1390 else:
   1391     raise ValueError(f'Unknown raster data type "{type(data_like).__name__}"')
-> 1393 obj = source.get_data(
   1394     bbox=bbox,
   1395     geom=geom,
   1396     buffer=buffer,
   1397     zoom_level=zoom_level,
   1398     align=align,
   1399     variables=variables,
   1400     time_tuple=time_tuple,
   1401     single_var_as_array=single_var_as_array,
   1402     cache_root=self._cache_dir if self.cache else None,
   1403     handle_nodata=handle_nodata,
   1404     logger=self.logger,
   1405 )
   1406 return obj

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:304, in RasterDatasetAdapter.get_data(self, bbox, geom, buffer, zoom_level, align, variables, time_tuple, handle_nodata, single_var_as_array, cache_root, logger)
    297 """Return a clipped, sliced and unified RasterDataset.
    298
    299 For a detailed description see:
    300 :py:func:`~hydromt.data_catalog.DataCatalog.get_rasterdataset`
    301 """
    302 try:
    303     # load data
--> 304     fns = self._resolve_paths(
    305         time_tuple, variables, zoom_level, geom, bbox, logger
    306     )
    307     self.mark_as_used()  # mark used
    308     ds = self._read_data(
    309         fns,
    310         geom,
   (...)
    314         logger=logger,
    315     )

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/rasterdataset.py:363, in RasterDatasetAdapter._resolve_paths(self, time_tuple, variables, zoom_level, geom, bbox, logger)
    360     zoom_level = self._parse_zoom_level(zoom_level, geom, bbox, logger=logger)
    362 # resolve path based on time, zoom level and/or variables
--> 363 fns = super()._resolve_paths(
    364     time_tuple=time_tuple,
    365     variables=variables,
    366     zoom_level=zoom_level,
    367 )
    368 return fns

File /usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/hydromt/data_adapter/data_adapter.py:407, in DataAdapter._resolve_paths(self, time_tuple, variables, zoom_level)
    404         fns_out.append(UPath(fn, **self.storage_options))
    406 if len(fns_out) == 0:
--> 407     raise FileNotFoundError(f"No such file found: {path}{postfix}")
    409 return fns_out

FileNotFoundError: No such file found: /home/runner/work/hydromt_sfincs/hydromt_sfincs/docs/_examples/p:/wflow_global/hydromt/topography/merit_hydro/*.vrt
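Note that zoom_level can be passed either as an integer level or, as in the cell above, as a (resolution, unit) tuple from which the closest matching level is selected; a short illustrative sketch of both forms:

# request a specific zoom level by its index ...
da_zl2 = sf.data_catalog.get_rasterdataset("fabdem_xyz", zoom_level=2)
# ... or let HydroMT pick the level closest to a target resolution
da_200m = sf.data_catalog.get_rasterdataset("fabdem_xyz", zoom_level=(200, "meter"))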
[12]:
# Now create the datasets_dep list. The order determines the priority of the datasets. Each entry is a dictionary with the dataset ("da") and its merge arguments
datasets_dep = [
    {"da": da_dep1, "zmin": 0.001},
    {"da": da_dep2, "offset": 0},
    {"da": da_dep3, "zmin": 0.001, "merge_method": "last"},
]
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
Cell In[12], line 3
      1 # Now create the de_dep_lst. The order determines the priority of the datasets. Each dataset is a dictionary with the dataset (da) and merge_arguments
      2 datasets_dep = [
----> 3     {"da": da_dep1, "zmin": 0.001},
      4     {"da": da_dep2, "offset": 0},
      5     {"da": da_dep3, "zmin": 0.001, "merge_method": "last"},
      6 ]

NameError: name 'da_dep1' is not defined
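Conceptually, the merge walks through the list in order, keeps the cells of each dataset that pass its zmin/zmax filter, and fills the remaining gaps from the next dataset. Below is a plain-xarray illustration of that idea with hypothetical inputs on a shared grid; it is not the actual setup_dep implementation, which also reprojects the data and applies offsets:

# conceptual priority merge of two elevation DataArrays
merged = da_dep1.where(da_dep1 > 0.001)  # keep only cells above zmin in the first dataset
merged = merged.combine_first(da_dep2)   # fill the remaining gaps with the second dataset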
[13]:
dep = sf.setup_dep(datasets_dep=datasets_dep)
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
Cell In[13], line 1
----> 1 dep = sf.setup_dep(datasets_dep=datasets_dep)

NameError: name 'datasets_dep' is not defined
[14]:
sf.grid["dep"].plot.imshow(vmin=-10, vmax=10, cmap="terrain")
---------------------------------------------------------------------------
KeyError                                  Traceback (most recent call last)
/usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/core/dataset.py in ?(self, name)
   1446             variable = self._variables[name]
   1447         except KeyError:
-> 1448             _, name, variable = _get_virtual_variable(self._variables, name, self.sizes)
   1449

KeyError: 'dep'

During handling of the above exception, another exception occurred:

KeyError                                  Traceback (most recent call last)
/usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/core/dataset.py in ?(self, key)
   1545                 return self._construct_dataarray(key)
   1546             except KeyError as e:
-> 1547                 raise KeyError(
   1548                     f"No variable named {key!r}. Variables on the dataset include {shorten_list_repr(list(self.variables.keys()), max_items=10)}"

/usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/core/dataset.py in ?(self, name)
   1446             variable = self._variables[name]
   1447         except KeyError:
-> 1448             _, name, variable = _get_virtual_variable(self._variables, name, self.sizes)
   1449

/usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/core/dataset.py in ?(variables, key, dim_sizes)
    205     split_key = key.split(".", 1)
    206     if len(split_key) != 2:
--> 207         raise KeyError(key)
    208

KeyError: 'dep'

The above exception was the direct cause of the following exception:

KeyError                                  Traceback (most recent call last)
/tmp/ipykernel_2962/2259187073.py in ?()
----> 1 sf.grid["dep"].plot.imshow(vmin=-10, vmax=10, cmap="terrain")

/usr/share/miniconda3/envs/hydromt-sfincs/lib/python3.11/site-packages/xarray/core/dataset.py in ?(self, key)
   1543         if utils.hashable(key):
   1544             try:
   1545                 return self._construct_dataarray(key)
   1546             except KeyError as e:
-> 1547                 raise KeyError(
   1548                     f"No variable named {key!r}. Variables on the dataset include {shorten_list_repr(list(self.variables.keys()), max_items=10)}"
   1549                 ) from e
   1550

KeyError: "No variable named 'dep'. Variables on the dataset include []"