Coverage for C:\src\imod-python\imod\wq\wel.py: 15%
125 statements
from pathlib import Path

import jinja2
import numpy as np

import imod
from imod.wq.pkgbase import BoundaryCondition


def _column_order(df, variable):
    """
    Return ordered columns, and associated timeseries columns.
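
    For example (illustrative): a dataframe containing "x", "y", "id", "time",
    "layer" and variable="rate" yields
    columns = ["x", "y", "id", "time", "layer", "rate"] and
    assoc_columns = ["time", "rate"]; without a "time" column,
    assoc_columns is None.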
13 """
14 if "time" in df:
15 assoc_columns = ["time", variable]
16 if "layer" in df:
17 columns = ["x", "y", "id", "time", "layer", variable]
18 else:
19 columns = ["x", "y", "id", "time", variable]
20 else:
21 assoc_columns = None
22 if "layer" in df:
23 columns = ["x", "y", variable, "layer", "id"]
24 else:
25 columns = ["x", "y", variable, "id"]
26 return columns, assoc_columns


class Well(BoundaryCondition):
    """
    The Well package is used to simulate a specified flux to individual cells,
    specified in units of length**3/time.

    Parameters
    ----------
    id_name: str or list of str
        name of the well(s).
    x: float or list of floats
        x coordinate of the well(s).
    y: float or list of floats
        y coordinate of the well(s).
    rate: float or list of floats
        pumping rate in the well(s).
    layer: None or int, optional
        layer from which the pumping takes place.
    time: None or listlike of np.datetime64, datetime.datetime, pd.Timestamp,
        cftime.datetime
        time during which the pumping takes place. Only needs to be specified
        if the model is transient.
    save_budget: bool, optional
        flag indicating whether the budget should be saved (IWELCB).
        Default is False.
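
    Examples
    --------
    A minimal sketch (coordinates and rates are illustrative). A single
    steady-state extraction well in layer 3:

    >>> well = Well(id_name="well_1", x=100.0, y=250.0, rate=-50.0, layer=3)

    A transient well, with one pumping rate per time:

    >>> times = np.array(["2000-01-01", "2000-02-01"], dtype="datetime64[ns]")
    >>> well = Well(
    ...     id_name="well_1", x=100.0, y=250.0, rate=[-50.0, -30.0], time=times
    ... )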
53 """
55 _pkg_id = "wel"
57 _template = jinja2.Template(
58 " {%- for time, timedict in wels.items() -%}"
59 " {%- for layer, value in timedict.items() %}\n"
60 " wel_p{{time}}_s{{system_index}}_l{{layer}} = {{value}}"
61 " {%- endfor -%}\n"
62 " {%- endfor -%}"
63 )
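    # For a single well system, rendering this template produces runfile lines
    # along the lines of (paths are illustrative):
    #   wel_p?_s1_l1 = path/to/rate_l1.ipf
    #   wel_p?_s1_l2 = path/to/rate_l2.ipf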

    # TODO: implement well to concentration IDF and use ssm_template
    # Ignored for now, since wells are nearly always extracting

    def __init__(
        self,
        id_name,
        x,
        y,
        rate,
        layer=None,
        time=None,
        concentration=None,
        save_budget=False,
    ):
        super().__init__()
        variables = {
            "id_name": id_name,
            "x": x,
            "y": y,
            "rate": rate,
            "layer": layer,
            "time": time,
            "concentration": concentration,
        }
        variables = {k: np.atleast_1d(v) for k, v in variables.items() if v is not None}
        length = max(map(len, variables.values()))
        index = np.arange(1, length + 1)
        self["index"] = index
        for k, v in variables.items():
            if v.size == index.size:
                self[k] = ("index", v)
            elif v.size == 1:
                self[k] = ("index", np.full(length, v))
            else:
                raise ValueError(f"Length of {k} does not match other arguments")

        self["save_budget"] = save_budget

    def _max_active_n(self, varname, nlayer, nrow, ncol):
        """
        Determine the maximum number of cells that are active during a stress
        period.

        Parameters
        ----------
        varname : str
            name of the variable to use to calculate the maximum number of
            active cells. Not used for well, here for polymorphism.
        nlayer, nrow, ncol : int
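
        For example (illustrative): five unique well ids without a "layer"
        coordinate in a model with nlayer=3 give a maximum of 15 active cells.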
115 """
116 nmax = np.unique(self["id_name"]).size
117 if "layer" not in self.dataset.coords: # Then it applies to every layer
118 nmax *= nlayer
119 self._cellcount = nmax
120 self._ssm_cellcount = nmax
121 return nmax

    def _compose_values_layer(self, directory, nlayer, name, time=None, compress=True):
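        # Build a {layer: runfile path} mapping. Illustratively, a steady-state
        # well present in layers 1 and 2 yields something like
        # {1: ".../name_l1.ipf", 2: ".../name_l2.ipf"}; without a "layer"
        # variable, a single "$" (all layers) entry is used instead.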
        values = {}
        d = {"directory": directory, "name": name, "extension": ".ipf"}

        if time is None:
            if "layer" in self.dataset:
                for layer in np.unique(self.dataset["layer"]):
                    layer = int(layer)
                    d["layer"] = layer
                    values[layer] = self._compose_path(d)
            else:
                values["$"] = self._compose_path(d)

        else:
            d["time"] = time
            if "layer" in self.dataset:
                # Since the well data is in long table format, it's the only
                # input that has to be inspected.
                select = np.argwhere((self.dataset["time"] == time).values)
                for layer in np.unique(self.dataset["layer"].values[select]):
                    d["layer"] = layer
                    values[layer] = self._compose_path(d)
            else:
                values["?"] = self._compose_path(d)

        if "layer" in self.dataset:
            # Compose does not accept non-integers, so use 0, then replace
            d["layer"] = 0
            if np.unique(self.dataset["layer"].values).size == nlayer:
                token_path = imod.util.path.compose(d).as_posix()
                token_path = token_path.replace("_l0", "_l$")
                values = {"$": token_path}
            elif compress:
                range_path = imod.util.path.compose(d).as_posix()
                range_path = range_path.replace("_l0", "_l:")
                # TODO: temporarily disable until imod-wq is fixed
                values = self._compress_idflayers(values, range_path)

        return values

    def _compose_values_time(self, directory, name, globaltimes, nlayer):
        # TODO: rename to _compose_values_timelayer?
        values = {"?": self._compose_values_layer(directory, nlayer=nlayer, name=name)}
        return values

    def _render(self, directory, globaltimes, system_index, nlayer):
        d = {"system_index": system_index}
        d["wels"] = self._compose_values_time(directory, "rate", globaltimes, nlayer)
        return self._template.render(d)

    def _render_ssm(self, directory, globaltimes, nlayer):
        if "concentration" in self.dataset.data_vars:
            d = {"pkg_id": self._pkg_id}
            name = "concentration"
            if "species" in self.dataset["concentration"].coords:
                concentration = {}
                for species in self.dataset["concentration"]["species"].values:
                    concentration[species] = self._compose_values_time(
                        directory, f"{name}_c{species}", globaltimes, nlayer=nlayer
                    )
            else:
                concentration = {
                    1: self._compose_values_time(
                        directory, name, globaltimes, nlayer=nlayer
                    )
                }
            d["concentration"] = concentration
            return self._ssm_template.render(d)
        else:
            return ""

    def _save_layers(self, df, directory, name, variable):
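        # Write the dataframe to one or more IPF files; with a "layer" column,
        # ids are prefixed per layer (e.g. "rate_l1/<id>") so wells with the
        # same id in different layers do not overwrite each other.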
        d = {"directory": directory, "name": name, "extension": ".ipf"}
        d["directory"].mkdir(exist_ok=True, parents=True)

        if "time" in df:
            itype = "timeseries"
        else:
            itype = None

        columns, assoc_columns = _column_order(df, variable)
        path = self._compose_path(d)
        df = df[columns]
        if "layer" in df:
            for layer, layerdf in df.groupby("layer"):
                # Ensure different IDs per layer are not overwritten.
                layerdf["id"] = f"{name}_l{layer}/" + layerdf["id"].astype(str)
                imod.ipf.save(
                    path=path, df=layerdf, itype=itype, assoc_columns=assoc_columns
                )
        else:
            imod.ipf.save(path=path, df=df, itype=itype, assoc_columns=assoc_columns)

        return

    def save(self, directory):
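        # Write the well data as IPF files into `directory`: first the "rate"
        # table, then, if present, one "concentration" table per species.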
        directory = Path(directory)

        all_species = [None]
        if "concentration" in self.dataset.data_vars:
            if "species" in self.dataset["concentration"].coords:
                all_species = self.dataset["concentration"]["species"].values

        df = self.dataset.to_dataframe().rename(columns={"id_name": "id"})
        self._save_layers(df, directory, "rate", "rate")

        # Loop over species if applicable
        if "concentration" in self.dataset:
            for species in all_species:
                if species is not None:
                    ds = self.dataset.sel(species=species)
                else:
                    ds = self.dataset

                df = ds.to_dataframe().rename(columns={"id_name": "id"})
                name = "concentration"
                if species is not None:
                    name = f"{name}_c{species}"
                self._save_layers(df, directory, name, "concentration")

        return

    def _pkgcheck(self, ibound=None):
        # TODO: implement
        pass

    def repeat_stress(self, stress_repeats, use_cftime=False):
        raise NotImplementedError(
            "Well does not support repeated stresses: time-varying data is "
            "saved into associated IPF files. Set explicit timeseries instead."
        )