Skip to content

Commit

Permalink
Improve output (#297)
Browse files Browse the repository at this point in the history
* Add add_nans argument to nlmod.gwf.output.get_heads_da

* Add parameters for hdry and hnoflo

And add tests

* Add test for a transport model and geotop

and a minor change in gis.py

* Add test for nlmod.plot.map_array

* Add modpath test (and add support for structured grids)

* Test if docs complete without geotop notebook

* Fix notebooks

* Revert "Test if docs complete without geotop notebook"

This reverts commit b130a1f.

* Only refine Lek at the boundary

* Increase speed of notebooks

* Update path of model of notebook 17 (so it is in gitignore)

* Change hhnk url, which failed in test

* Handle comments from @dbrakenhoff

Remove add_nans again; instead, set hdry or hnoflo to None to skip replacing that value with NaN
  • Loading branch information
rubencalje authored Nov 30, 2023
1 parent bb70164 commit 098c680
Show file tree
Hide file tree
Showing 18 changed files with 318 additions and 86 deletions.
14 changes: 3 additions & 11 deletions docs/examples/02_surface_water.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -223,7 +223,7 @@
"metadata": {},
"source": [
"#### Save the data to use in other notebooks as well\n",
"We save the bgt-data to a geojson file, so we can use the data in other notebooks with surface water as well"
"We save the bgt-data to a GeoPackage file, so we can use the data in other notebooks with surface water as well"
]
},
{
Expand All @@ -233,8 +233,8 @@
"metadata": {},
"outputs": [],
"source": [
"fname_bgt = os.path.join(cachedir, \"bgt.geojson\")\n",
"bgt.to_file(fname_bgt, driver=\"GeoJSON\")"
"fname_bgt = os.path.join(cachedir, \"bgt.gpkg\")\n",
"bgt.to_file(fname_bgt)"
]
},
{
Expand Down Expand Up @@ -787,14 +787,6 @@
"cbar = fig.colorbar(qm, shrink=1.0)\n",
"cbar.set_label(\"head [m NAP]\")"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6e5833d6",
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
Expand Down
8 changes: 4 additions & 4 deletions docs/examples/09_schoonhoven.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@
"metadata": {},
"outputs": [],
"source": [
"fname_bgt = os.path.join(cachedir, \"bgt.geojson\")\n",
"fname_bgt = os.path.join(cachedir, \"bgt.gpkg\")\n",
"if not os.path.isfile(fname_bgt):\n",
" raise (\n",
" Exception(\n",
Expand Down Expand Up @@ -207,7 +207,7 @@
"metadata": {},
"outputs": [],
"source": [
"refinement_features = [(bgt[bgt[\"bronhouder\"] == \"L0002\"], 2)]\n",
"refinement_features = [(bgt[bgt[\"bronhouder\"] == \"L0002\"].dissolve().boundary, 2)]\n",
"ds = nlmod.grid.refine(ds, refinement_features=refinement_features)"
]
},
Expand Down Expand Up @@ -503,7 +503,7 @@
" colorbar_label=\"head [m NAP]\",\n",
" title=\"mean head\",\n",
")\n",
"bgt.plot(ax=pc.axes, edgecolor=\"k\", facecolor=\"none\");"
"bgt.dissolve().plot(ax=pc.axes, edgecolor=\"k\", facecolor=\"none\");"
]
},
{
Expand Down Expand Up @@ -745,7 +745,7 @@
"line = ax.add_collection(lc)\n",
"nlmod.plot.colorbar_inside(line, label=\"Travel time (years)\")\n",
"\n",
"bgt.plot(ax=ax, edgecolor=\"k\", facecolor=\"none\")"
"bgt.dissolve().plot(ax=ax, edgecolor=\"k\", facecolor=\"none\")"
]
},
{
Expand Down
14 changes: 3 additions & 11 deletions docs/examples/11_grid_rotation.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -165,17 +165,9 @@
"metadata": {},
"outputs": [],
"source": [
"bgt = nlmod.gwf.surface_water.get_gdf(ds)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "e023dbca",
"metadata": {},
"outputs": [],
"source": [
"bgt.plot()"
"bgt = nlmod.read.bgt.get_bgt(extent)\n",
"bgt = nlmod.gwf.surface_water.add_stages_from_waterboards(bgt, extent=extent)\n",
"bgt = nlmod.grid.gdf_to_grid(bgt, ds).set_index(\"cellid\")"
]
},
{
Expand Down
2 changes: 1 addition & 1 deletion docs/examples/14_stromingen_example.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -106,7 +106,7 @@
"# wells = nlmod.read.get_wells(extent) # no well data is available just yet\n",
"\n",
"# surface water features and levels\n",
"fname_bgt = os.path.join(cachedir, \"bgt.geojson\")\n",
"fname_bgt = os.path.join(cachedir, \"bgt.gpkg\")\n",
"if not os.path.isfile(fname_bgt):\n",
" raise (\n",
" Exception(\n",
Expand Down
6 changes: 3 additions & 3 deletions docs/examples/15_geotop.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@
"outputs": [],
"source": [
"# Define an extent and a line from southwest to northeast through this extent\n",
"extent = [95000.0, 105000.0, 494000.0, 500000.0]\n",
"extent = [100000.0, 105000.0, 499800.0, 500000.0]\n",
"line = LineString([(extent[0], extent[2]), (extent[1], extent[3])])"
]
},
Expand Down Expand Up @@ -386,13 +386,13 @@
"var = \"kh\"\n",
"norm = matplotlib.colors.Normalize(0.0, 40.0)\n",
"\n",
"f, axes = nlmod.plot.get_map(extent, ncols=2)\n",
"f, axes = nlmod.plot.get_map(extent, nrows=2, figsize=20)\n",
"pc = nlmod.plot.data_array(regis[var].loc[layer], ax=axes[0], norm=norm)\n",
"nlmod.plot.colorbar_inside(pc, bounds=[0.02, 0.05, 0.02, 0.9], ax=axes[0])\n",
"nlmod.plot.title_inside(\"REGIS\", ax=axes[0])\n",
"pc = nlmod.plot.data_array(regis[f\"{var}_gt\"].loc[layer], ax=axes[1], norm=norm)\n",
"nlmod.plot.title_inside(\"GeoTOP\", ax=axes[1])\n",
"nlmod.plot.colorbar_inside(pc, bounds=[0.02, 0.05, 0.02, 0.9], ax=axes[1])"
"nlmod.plot.colorbar_inside(pc, bounds=[0.02, 0.05, 0.02, 0.9], ax=axes[1]);"
]
},
{
Expand Down
10 changes: 2 additions & 8 deletions docs/examples/16_groundwater_transport.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@
"start_time = \"2010-1-1\"\n",
"starting_head = 1.0\n",
"\n",
"municipalities = nlmod.read.boundaries.get_municipalities(extent=extent_hbossche)"
"municipalities = nlmod.read.administrative.get_municipalities(extent=extent_hbossche)"
]
},
{
Expand Down Expand Up @@ -600,14 +600,8 @@
}
],
"metadata": {
"kernelspec": {
"display_name": "artesia",
"language": "python",
"name": "python3"
},
"language_info": {
"name": "python",
"version": "3.10.12"
"name": "python"
}
},
"nbformat": 4,
Expand Down
3 changes: 1 addition & 2 deletions docs/examples/17_unsaturated_zone_flow.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -66,8 +66,7 @@
"metadata": {},
"outputs": [],
"source": [
"model_name = \"test_uzf\"\n",
"model_ws = os.path.join(\"data\", model_name)\n",
"model_name = model_ws = \"model17\"\n",
"figdir, cachedir = nlmod.util.get_model_dirs(model_ws)"
]
},
Expand Down
59 changes: 44 additions & 15 deletions nlmod/dims/grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,12 +107,37 @@ def xy_to_icell2d(xy, ds):
icell2d : int
number of the icell2d value of a cell containing the xy point.
"""

msg = "xy_to_icell2d can only be applied to a vertex grid"
assert ds.gridtype == "vertex", msg
icell2d = (np.abs(ds.x.data - xy[0]) + np.abs(ds.y.data - xy[1])).argmin().item()

return icell2d


def xy_to_row_col(xy, ds):
    """Get the row and column of the structured-grid cell containing an xy point.

    Parameters
    ----------
    xy : list, tuple
        coordinates of a point.
    ds : xarray dataset
        model dataset; must be a structured grid (ds.gridtype == "structured").

    Returns
    -------
    row : int
        number of the row value of a cell containing the xy point.
    col : int
        number of the column value of a cell containing the xy point.
    """
    msg = "xy_to_row_col can only be applied to a structured grid"
    assert ds.gridtype == "structured", msg
    # nearest cell-center lookup along each axis; .item() converts the numpy
    # integer to a plain Python int, consistent with xy_to_icell2d
    row = np.abs(ds.y.data - xy[1]).argmin().item()
    col = np.abs(ds.x.data - xy[0]).argmin().item()
    return row, col


def xyz_to_cid(xyz, ds=None, modelgrid=None):
"""get the icell2d value of a point defined by its x and y coordinates.
Expand Down Expand Up @@ -1142,11 +1167,10 @@ def gdf_to_data_array_struc(


def gdf_to_da(
gdf, ds, column, agg_method=None, fill_value=np.NaN, min_total_overlap=0.0,
ix=None
gdf, ds, column, agg_method=None, fill_value=np.NaN, min_total_overlap=0.0, ix=None
):
"""Project vector data on a grid. Aggregate data if multiple
geometries are in a single cell. Supports structured and vertex grids.
geometries are in a single cell. Supports structured and vertex grids.
This method replaces gdf_to_data_array_struc.
Parameters
Expand Down Expand Up @@ -1385,12 +1409,17 @@ def aggregate_vector_per_cell(gdf, fields_methods, modelgrid=None):
gdf_copy["area_times_field"] = gdf_copy["area"] * gdf_copy[field]

# skipna is not implemented by groupby therefore we use min_count=1
celldata[field] = (
gdf_copy.groupby(by="cellid")["area_times_field"].sum(min_count=1) /
gdf_copy.groupby(by="cellid")["area"].sum(min_count=1)
)

elif method in ("nearest", "length_weighted", "max_length", "max_area", "center_grid"):
celldata[field] = gdf_copy.groupby(by="cellid")["area_times_field"].sum(
min_count=1
) / gdf_copy.groupby(by="cellid")["area"].sum(min_count=1)

elif method in (
"nearest",
"length_weighted",
"max_length",
"max_area",
"center_grid",
):
for cid, group in tqdm(gr, desc="Aggregate vector data"):
agg_dic = _get_aggregates_values(group, {field: method}, modelgrid)

Expand All @@ -1403,7 +1432,7 @@ def aggregate_vector_per_cell(gdf, fields_methods, modelgrid=None):
return celldata


def gdf_to_bool_da(gdf, ds, ix=None, buffer=0., **kwargs):
def gdf_to_bool_da(gdf, ds, ix=None, buffer=0.0, **kwargs):
"""convert a GeoDataFrame with polygon geometries into a data array
corresponding to the modelgrid in which each cell is 1 (True) if one or
more geometries are (partly) in that cell.
Expand All @@ -1430,7 +1459,7 @@ def gdf_to_bool_da(gdf, ds, ix=None, buffer=0., **kwargs):
return da


def gdf_to_bool_ds(gdf, ds, da_name, keep_coords=None, ix=None, buffer=0., **kwargs):
def gdf_to_bool_ds(gdf, ds, da_name, keep_coords=None, ix=None, buffer=0.0, **kwargs):
"""convert a GeoDataFrame with polygon geometries into a model dataset with
a data_array named 'da_name' in which each cell is 1 (True) if one or more
geometries are (partly) in that cell.
Expand Down Expand Up @@ -1465,7 +1494,7 @@ def gdf_to_bool_ds(gdf, ds, da_name, keep_coords=None, ix=None, buffer=0., **kwa
return ds_out


def gdf_to_count_da(gdf, ds, ix=None, buffer=0., **kwargs):
def gdf_to_count_da(gdf, ds, ix=None, buffer=0.0, **kwargs):
"""Counts in how many polygons a coordinate of ds appears.
Parameters
Expand Down Expand Up @@ -1509,7 +1538,7 @@ def gdf_to_count_da(gdf, ds, ix=None, buffer=0., **kwargs):
geoms = [gdf]

for geom in geoms:
if buffer > 0.:
if buffer > 0.0:
cids = ix.intersects(geom.buffer(buffer), **kwargs)["cellids"]
else:
cids = ix.intersects(geom, **kwargs)["cellids"]
Expand All @@ -1526,7 +1555,7 @@ def gdf_to_count_da(gdf, ds, ix=None, buffer=0., **kwargs):
return da


def gdf_to_count_ds(gdf, ds, da_name, keep_coords=None, ix=None, buffer=0., **kwargs):
def gdf_to_count_ds(gdf, ds, da_name, keep_coords=None, ix=None, buffer=0.0, **kwargs):
"""Counts in how many polygons a coordinate of ds appears.
Parameters
Expand Down
2 changes: 1 addition & 1 deletion nlmod/gis.py
Original file line number Diff line number Diff line change
Expand Up @@ -396,7 +396,7 @@ def ds_to_ugrid_nc_file(
# Convert boolean layers to integer and int64 to int32
for var in variables:
if np.issubdtype(ds[var].dtype, bool):
ds[var].encoding["dtype"] = np.int
ds[var].encoding["dtype"] = np.int32
elif np.issubdtype(ds[var].dtype, str) or np.issubdtype(ds[var].dtype, object):
# convert the string to an index of unique strings
index = np.unique(ds[var], return_inverse=True)[1]
Expand Down
29 changes: 21 additions & 8 deletions nlmod/mfoutput/mfoutput.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ def _get_time_index(fobj, ds=None, gwf_or_gwt=None):
return tindex


def _create_da(arr, modelgrid, times):
def _create_da(arr, modelgrid, times, hdry=-1e30, hnoflo=1e30):
"""Create data array based on array, modelgrid, and time array.
Parameters
Expand All @@ -88,6 +88,12 @@ def _create_da(arr, modelgrid, times):
flopy modelgrid object
times : list or array
list or array containing times as floats (usually in days)
hdry : float, optional
The value of dry cells, which will be replaced by NaNs. If hdry is None, the
values of dry cells will not be replaced by NaNs. The default is -1e30.
hnoflo : float, optional
The value of no-flow cells, which will be replaced by NaNs. If hnoflo is None,
the values of no-flow cells will not be replaced by NaNs. The default is 1e30.
Returns
-------
Expand All @@ -99,10 +105,15 @@ def _create_da(arr, modelgrid, times):
dims, coords = get_dims_coords_from_modelgrid(modelgrid)
da = xr.DataArray(data=arr, dims=("time",) + dims, coords=coords)

# set dry/no-flow to nan
hdry = -1e30
hnoflo = 1e30
da = da.where((da != hdry) & (da != hnoflo))
if hdry is not None or hnoflo is not None:
# set dry/no-flow to nan
if hdry is None:
mask = da != hnoflo
elif hnoflo is None:
mask = da != hdry
else:
mask = (da != hdry) & (da != hnoflo)
da = da.where(mask)

# set local time coordinates
da.coords["time"] = ds_time_idx(times)
Expand Down Expand Up @@ -168,7 +179,7 @@ def _get_heads_da(
stacked_arr = stacked_arr[:, :, 0, :]

# create data array
da = _create_da(stacked_arr, modelgrid, hobj.get_times())
da = _create_da(stacked_arr, modelgrid, hobj.get_times(), **kwargs)

return da

Expand Down Expand Up @@ -263,10 +274,12 @@ def _get_flopy_data_object(var, ds=None, gwml=None, fname=None, grbfile=None):
extension = "_gwt.ucn"
else:
raise (ValueError(f"Unknown variable {var}"))
msg = f"Load the {var}s using either ds, {ml_name} or fname"
assert ((ds is not None) + (gwml is not None) + (fname is not None)) == 1, msg

if fname is None:
if ds is None:
if gwml is None:
msg = f"Load the {var}s using either ds, {ml_name} or fname"
raise (ValueError(msg))
# return gwf.output.head(), gwf.output.budget() or gwt.output.concentration()
return getattr(gwml.output, var)()
fname = os.path.join(ds.model_ws, ds.model_name + extension)
Expand Down
Loading

0 comments on commit 098c680

Please sign in to comment.