NetCDF scalar output
An extra dimension is not allowed when running Wflow as part of Delft-FEWS: by specifying a layer index (otherwise optional), the extra dimension is dropped, so the General Adapter of FEWS can import this file.
verseve committed Jan 31, 2022
1 parent 20411db commit 0898581
Showing 7 changed files with 103 additions and 43 deletions.
25 changes: 14 additions & 11 deletions docs/src/config.md
@@ -235,13 +235,16 @@ required. A `reducer` can be specified to apply to the model output, see for more
information the following section [Output CSV section](@ref). When a `map` is provided to
extract data for certain locations (e.g. `gauges`) or areas (e.g. `subcatchment`), the
NetCDF location names are extracted from these maps. For a specific location (grid cell) a
`location` is required. Model parameters with the extra dimension `layer` for layered model
parameters of the vertical `sbm` concept require the specification of the layer (see also
example below). If multiple layers are desired, this can be specified in separate
`[[netcdf.variable]]` entries. In the section [Output CSV section](@ref), similar
functionality is available for CSV. For integration with Delft-FEWS, see also [Run from
Delft-FEWS](@ref), it is recommended to write scalar data to NetCDF format since the General
Adapter of Delft-FEWS can ingest this data format directly.
`location` is required. For layered model parameters and variables that have an extra
dimension `layer` and are part of the vertical `sbm` concept, it is possible to specify an
internal layer index (see also example below). If multiple layers are desired, this can be
specified in separate `[[netcdf.variable]]` entries. Note that specifying the layer is not
optional when Wflow is integrated with Delft-FEWS, as the `importNetcdfActivity` of the
Delft-FEWS General Adapter does not allow an extra dimension for NetCDF scalar data. In the
section [Output CSV section](@ref), similar functionality is available for CSV. For
integration with Delft-FEWS (see also [Run from Delft-FEWS](@ref)), it is recommended to
write scalar data to NetCDF format, since the General Adapter of Delft-FEWS can ingest this
data format directly.
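
For illustration, a hedged sketch of such entries (the parameter name `vertical.vwc` and
the map `gauges` are assumptions, not taken from this commit; `layer` selects a single soil
layer, so no extra dimension ends up in the file):

```toml
[[netcdf.variable]]
name = "vwc_layer_1"
map = "gauges"
parameter = "vertical.vwc"
layer = 1

[[netcdf.variable]]
name = "vwc_layer_2"
map = "gauges"
parameter = "vertical.vwc"
layer = 2
```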

```toml
[netcdf]
```

@@ -287,10 +290,10 @@ of the vector, the coordinates `coordinate.x` and `coordinate.y`, or the x and y indices of
the 2D array (`index.x` and `index.y`) can be provided. Finally a `map` can be provided to
extract data for certain locations (e.g. `gauges`) or areas (e.g. `subcatchment`). In this
case a single entry can lead to multiple columns in the CSV file, which will be of the form
`header_id`, e.g. `Q_20`, for a gauge with integer ID 20. Model parameters with the extra
dimension `layer` for layered model parameters of the vertical `sbm` concept require the
specification of the layer (see also example below). If multiple layers are desired, this
can be specified in separate `[[csv.column]]` entries.
`header_id`, e.g. `Q_20`, for a gauge with integer ID 20. For layered model parameters and
variables that have an extra dimension `layer` and are part of the vertical `sbm` concept,
an internal layer index should be specified (see also example below). If multiple layers
are desired, this can be specified in separate `[[csv.column]]` entries.

The double brackets in `[[csv.column]]` are TOML syntax to indicate that the entry is part of a
list. You may specify as many entries as you wish.
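
A hedged sketch of such a column entry (again, the parameter name `vertical.vwc`, the map
`gauges`, and the header are assumptions for illustration):

```toml
[[csv.column]]
header = "vwc"
map = "gauges"
parameter = "vertical.vwc"
layer = 2
```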
2 changes: 1 addition & 1 deletion src/hbv_model.jl
@@ -415,7 +415,7 @@ function initialize_hbv_model(config::Config)
reservoir = isempty(reservoir) ? nothing : reservoir.reverse_indices,
lake = isempty(lake) ? nothing : lake.reverse_indices,
)
writer = prepare_writer(config, reader, modelmap, indices_reverse, x_nc, y_nc, nc)
writer = prepare_writer(config, modelmap, indices_reverse, x_nc, y_nc, nc)
close(nc)

# for each domain save:
111 changes: 85 additions & 26 deletions src/io.jl
@@ -350,7 +350,7 @@ function create_tracked_netcdf(path)
end

"prepare an output dataset for scalar data"
function setup_scalar_netcdf(path, ncvars, calendar, time_units, float_type = Float32)
function setup_scalar_netcdf(path, ncvars, modelmap, calendar, time_units, extra_dim, config, float_type = Float32)
ds = create_tracked_netcdf(path)
defDim(ds, "time", Inf) # unlimited
defVar(
@@ -360,7 +360,10 @@ function setup_scalar_netcdf(path, ncvars, calendar, time_units, float_type = Float32)
("time",),
attrib = ["units" => time_units, "calendar" => calendar],
)
for nc in ncvars
if !isnothing(extra_dim)
set_extradim_netcdf(ds, extra_dim)
end
for (nc, netcdfvars) in zip(ncvars, config.netcdf.variable)
# Delft-FEWS requires the attribute :cf_role = "timeseries_id" when a NetCDF file
# contains more than one location list
defVar(
Expand All @@ -370,17 +373,57 @@ function setup_scalar_netcdf(path, ncvars, calendar, time_units, float_type = Fl
(nc.location_dim,),
attrib = ["cf_role" => "timeseries_id"],
)
defVar(
ds,
nc.var,
float_type,
(nc.location_dim, "time"),
attrib = ["_FillValue" => float_type(NaN)],
)
v = param(modelmap, nc.par)
if eltype(v) <: AbstractFloat
defVar(
ds,
nc.var,
float_type,
(nc.location_dim, "time"),
attrib = ["_FillValue" => float_type(NaN)],
)
elseif eltype(v) <: SVector
if haskey(netcdfvars, extra_dim.name)
# `extra_dim.name` as specified in the TOML file is used to index
defVar(
ds,
nc.var,
float_type,
(nc.location_dim, "time"),
attrib = ["_FillValue" => float_type(NaN)],
)
else
defVar(
ds,
nc.var,
float_type,
(nc.location_dim, extra_dim.name, "time"),
attrib = ["_FillValue" => float_type(NaN)],
)
end
else
error("Unsupported output type: ", typeof(v))
end
end
return ds
end

"set extra dimension in output NetCDF file"
function set_extradim_netcdf(ds, extra_dim)
    # the axis attribute `Z` is required by Delft-FEWS to import this type of 3D data
    # the values of this dimension (`extra_dim.value`) should be of float type
    if extra_dim.name == "layer"
        attributes = [
            "long_name" => "layer_index",
            "standard_name" => "layer_index",
            "axis" => "Z",
        ]
    else
        # guard: without this branch `attributes` would be undefined below
        error("Unsupported extra dimension: ", extra_dim.name)
    end
    defVar(ds, extra_dim.name, extra_dim.value, (extra_dim.name,), attrib = attributes)
return nothing
end
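
As a usage sketch (assuming four soil layers and `Float64` values, mirroring the
`initialize_sbm_model` call further down in this diff; `ds` is an open, writable
`NCDataset`):

```julia
# the extra dimension as constructed for the vertical sbm concept,
# with float values as required for Delft-FEWS import
extra_dim = (name = "layer", value = Float64.(collect(1:4)))
set_extradim_netcdf(ds, extra_dim)
```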

"prepare an output dataset for grid data"
function setup_grid_netcdf(
path,
@@ -453,7 +496,7 @@
)
end
if !isnothing(extra_dim)
defVar(ds, extra_dim.name, extra_dim.value, (extra_dim.name,))
set_extradim_netcdf(ds, extra_dim)
end
defVar(
ds,
@@ -474,7 +517,7 @@
attrib = ["_FillValue" => float_type(NaN)],
)
elseif eltype(val.vector) <: SVector
# SVectors are used for additional dimension (`extra_dim`)
# for SVectors an additional dimension (`extra_dim`) is required
defVar(
ds,
key,
@@ -498,7 +541,7 @@
attrib = ["_FillValue" => float_type(NaN)],
)
elseif eltype(val.vector) <: SVector
# SVectors are used for additional dimension (`extra_dim`)
# for SVectors an additional dimension (`extra_dim`) is required
defVar(
ds,
key,
@@ -541,8 +584,9 @@ struct Writer
state_nc_path::Union{String,Nothing} # path NetCDF file with states
dataset_scalar::Union{NCDataset,Nothing} # dataset(NetCDF) for scalar data
nc_scalar::Vector # model parameter (arrays) and associated reducer function for NetCDF scalar output
ncvars_dims::Vector # NetCDF variable, location dimension and location name for scalar data
ncvars_dims::Vector # model parameter (String) and associated NetCDF variable, location dimension and location name for scalar data
nc_scalar_path::Union{String,Nothing} # path NetCDF file (scalar data)
extra_dim::Union{NamedTuple,Nothing} # name and values for extra dimension (to store SVectors)
end

function prepare_reader(config)
Expand Down Expand Up @@ -647,20 +691,21 @@ function locations_map(ds, mapname, config)
return ids
end

"Get a Vector{Tuple} with NetCDF variable, dimension and location names for scalar data"
"Get a Vector{Tuple} with model parameter and associated NetCDF variable name, dimension and location names for scalar data"
function nc_variables_dims(nc_variables, dataset, config)
ncvars_dims = []
for nc_var in nc_variables
var = nc_var["name"]::String
par = nc_var["parameter"]::String
if haskey(nc_var, "map")
mapname = nc_var["map"]
ids = string.(locations_map(dataset, mapname, config))
location_dim = string(var, '_', nc_var["map"])
push!(ncvars_dims, (var = var, location_dim = location_dim, locations = ids))
push!(ncvars_dims, (par = par, var = var, location_dim = location_dim, locations = ids))
else
push!(
ncvars_dims,
(
par = par,
var = var,
location_dim = nc_var["location"],
locations = [nc_var["location"]],
@@ -789,7 +834,6 @@ end

function prepare_writer(
config,
reader,
modelmap,
rev_inds,
x_nc,
@@ -859,7 +903,7 @@ function prepare_writer(
# get NetCDF info for scalar data (variable name, locationset (dim) and
# location ids)
ncvars_dims = nc_variables_dims(config.netcdf.variable, nc_static, config)
ds_scalar = setup_scalar_netcdf(nc_scalar_path, ncvars_dims, calendar, time_units)
ds_scalar = setup_scalar_netcdf(nc_scalar_path, ncvars_dims, modelmap, calendar, time_units, extra_dim, config)
# create a vector of (parameter, reducer) named tuples which will be used to
# retrieve and reduce data during a model run
nc_scalar = []
@@ -919,6 +963,7 @@
nc_scalar,
ncvars_dims,
nc_scalar_path,
extra_dim,
)
end

@@ -927,16 +972,29 @@ function write_netcdf_timestep(model, dataset)
@unpack writer, clock, config = model

time_index = add_time(dataset, clock.time)
for (nt, nc, var) in zip(writer.nc_scalar, writer.ncvars_dims, config.netcdf.variable)
for (nt, nc) in zip(writer.nc_scalar, config.netcdf.variable)
A = param(model, nt.parameter)
elemtype = eltype(A)
# could be a value, or a vector in case of map
if eltype(A) <: SVector
i = get_index_dimension(var, model)
v = nt.reducer(getindex.(A,i))
else
if elemtype <: AbstractFloat
v = nt.reducer(A)
dataset[nc["name"]][:, time_index] .= v
elseif elemtype <: SVector
# check if an extra dimension and index is specified in the TOML file
if haskey(nc, writer.extra_dim.name)
i = get_index_dimension(nc, model)
v = nt.reducer(getindex.(A,i))
dataset[nc["name"]][:, time_index] .= v
else
nlayer = length(first(A))
for i = 1:nlayer
v = nt.reducer(getindex.(A,i))
dataset[nc["name"]][:, i, time_index] .= v
end
end
else
error("Unsupported output type: ", elemtype)
end
dataset[nc.var][:, time_index] .= v
end
return model
end
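
The resulting file layout can be checked after a model run; a minimal sketch (the file
name `output_scalar.nc` and the variable name `vwc` are assumptions for illustration):

```julia
using NCDatasets

ds = NCDataset("output_scalar.nc")
# with `layer` set in the TOML entry the variable has dimensions (location, time);
# without it, the extra `layer` dimension is written: (location, layer, time)
@show size(ds["vwc"])
close(ds)
```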
@@ -1159,8 +1217,9 @@ function write_csv_row(model)
print(io, string(clock.time))
for (nt, col) in zip(writer.csv_cols, config.csv.column)
A = param(model, nt.parameter)
# could be a value, or a vector in case of map
# v could be a value, or a vector in case of map
if eltype(A) <: SVector
# indexing is required in case of a SVector and CSV output
i = get_index_dimension(col, model)
v = nt.reducer(getindex.(A,i))
else
@@ -1460,7 +1519,7 @@ function get_index_dimension(var, model)
inds = collect(1:vertical.maxlayers)
index = inds[var["layer"]]
else
error("Unrecognized dimension name to index $(var)")
error("Unrecognized or missing dimension name to index $(var)")
end
return index
end
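
For illustration, a hedged sketch of how the layer index from a TOML entry resolves
(assuming a vertical concept with `maxlayers = 4`; the parameter name is hypothetical):

```julia
# hypothetical TOML entry, parsed into a Dict as in the reader
var = Dict("parameter" => "vertical.vwc", "layer" => 2)
inds = collect(1:4)        # stands in for collect(1:vertical.maxlayers)
index = inds[var["layer"]] # == 2, used to slice each SVector per grid cell
```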
1 change: 0 additions & 1 deletion src/sbm_gwf_model.jl
@@ -348,7 +348,6 @@ function initialize_sbm_gwf_model(config::Config)
)
writer = prepare_writer(
config,
reader,
modelmap,
indices_reverse,
x_nc,
3 changes: 1 addition & 2 deletions src/sbm_model.jl
@@ -274,13 +274,12 @@ function initialize_sbm_model(config::Config)
)
writer = prepare_writer(
config,
reader,
modelmap,
indices_reverse,
x_nc,
y_nc,
nc,
extra_dim = (name = "layer", value = collect(1:sbm.maxlayers)),
extra_dim = (name = "layer", value = Float.(collect(1:sbm.maxlayers))),
)
close(nc)

2 changes: 1 addition & 1 deletion src/sediment_model.jl
@@ -146,7 +146,7 @@ function initialize_sediment_model(config::Config)
reservoir = isempty(reservoir) ? nothing : reservoir.reverse_indices,
lake = isempty(lake) ? nothing : lake.reverse_indices,
)
writer = prepare_writer(config, reader, modelmap, indices_reverse, x_nc, y_nc, nc)
writer = prepare_writer(config, modelmap, indices_reverse, x_nc, y_nc, nc)
close(nc)

# for each domain save the directed acyclic graph, the traversion order,
2 changes: 1 addition & 1 deletion test/run_sbm.jl
@@ -66,7 +66,7 @@ end
@test ds["Q_gauges"].attrib["cf_role"] == "timeseries_id"
@test ds["temp_index"][:] [2.39f0]
@test ds["temp_coord"][:] [2.39f0]
@test keys(ds.dim) == ["time", "Q_gauges", "temp_bycoord", "temp_byindex"]
@test keys(ds.dim) == ["time", "layer", "Q_gauges", "temp_bycoord", "temp_byindex"]
end

@testset "first timestep" begin
