Coverage for C:\src\imod-python\imod\formats\prj\disv_conversion.py: 62%

347 statements  

coverage.py v7.4.4, created at 2024-04-08 13:27 +0200

1""" 

2Most of the functionality here attempts to replicate what iMOD does with 

3project files. 

4""" 

5 

6from __future__ import annotations 

7 

8import itertools 

9import pickle 

10from collections import Counter 

11from datetime import datetime 

12from typing import Dict, List, Optional, Tuple, cast 

13 

14import numpy as np 

15import pandas as pd 

16import xarray as xr 

17import xugrid as xu 

18 

19import imod 

20from imod.mf6.model import Modflow6Model 

21from imod.mf6.utilities.package import get_repeat_stress 

22from imod.prepare.layer import get_upper_active_grid_cells 

23from imod.typing import GridDataArray 

24from imod.util.imports import MissingOptionalModule 

25 

26try: 

27 import geopandas as gpd 

28except ImportError: 

29 gpd = MissingOptionalModule("geopandas") 

30 

31 

def hash_xy(da: xr.DataArray) -> Tuple[int, int]:
    """
    Create a unique identifier based on the x and y coordinates of a DataArray.
    """
    x = hash(pickle.dumps(da["x"].values))
    y = hash(pickle.dumps(da["y"].values))
    return x, y
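
# Note: hash(pickle.dumps(...)) fingerprints the exact bytes of the coordinate
# arrays, so two grids only share a hash when their x and y coordinates match
# exactly (dtype included). This is what makes the weights cache below safe.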

class SingularTargetRegridderWeightsCache:
    """
    Create a mapping of (source_coords, regridder_cls) => regridding weights.

    Allows re-use of the regridding weights, as computing the weights is the
    most costly step.

    The regridder only processes x and y coordinates: we can hash these and
    get a unique identifier. The target is assumed to be constant.
    """

    def __init__(self, projectfile_data, target, cache_size: int):
        # Collect the x-y coordinates of all x-y dimensioned DataArrays.
        # Determine which regridding method to use.
        # Count occurrences of both.
        # Create and cache weights of the most common ones.
        keys = []
        sources = {}
        methods = {}
        for pkgdict in projectfile_data.values():
            for variable, da in pkgdict.items():
                xydims = {"x", "y"}

                if isinstance(da, xr.DataArray) and xydims.issubset(da.dims):
                    # For initial condition, constant head, general head boundary:
                    if variable == "head":
                        cls = xu.BarycentricInterpolator
                        method = None
                    elif variable == "conductance":
                        cls = xu.RelativeOverlapRegridder
                        method = "conductance"
                    else:
                        cls = xu.OverlapRegridder
                        method = "mean"

                    x, y = hash_xy(da)
                    key = (x, y, cls)
                    keys.append(key)
                    sources[key] = da
                    methods[key] = method

        counter = Counter(keys)
        self.target = target
        self.weights = {}
        for key, _ in counter.most_common(cache_size):
            cls = key[2]
            ugrid_source = xu.UgridDataArray.from_structured(sources[key])
            kwargs = {"source": ugrid_source, "target": target}
            method = methods[key]
            if method is not None:
                kwargs["method"] = method
            regridder = cls(**kwargs)
            self.weights[key] = regridder.weights
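
    # Usage sketch (illustrative): construct once with all project data, then
    # call ``regrid`` per variable; cached weights are reused whenever the
    # source coordinates and regridder class match.
    #
    #   cache = SingularTargetRegridderWeightsCache(data, target, cache_size=5)
    #   disv_k = cache.regrid(k, method="geometric_mean", original2d=original2d)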

    def regrid(
        self,
        source: xr.DataArray,
        method: str = "mean",
        original2d: Optional[xr.DataArray] = None,
    ):
        if source.dims[-2:] != ("y", "x"):  # So it's a constant.
            if original2d is None:
                raise ValueError("original2d must be provided for constant values")
            source = source * xr.ones_like(original2d)

        kwargs = {"target": self.target}
        if method == "barycentric":
            cls = xu.BarycentricInterpolator
        elif method == "conductance":
            cls = xu.RelativeOverlapRegridder
            kwargs["method"] = method
        else:
            cls = xu.OverlapRegridder
            kwargs["method"] = method

        x, y = hash_xy(source)
        key = (x, y, cls)
        if key in self.weights:
            kwargs["weights"] = self.weights[key]
            regridder = cls.from_weights(**kwargs)
            # Avoid creation of a UgridDataArray here
            dims = source.dims[:-2]
            coords = {k: source.coords[k] for k in dims}
            facedim = self.target.face_dimension
            face_source = xr.DataArray(
                source.data.reshape(*source.shape[:-2], -1),
                coords=coords,
                dims=[*dims, facedim],
                name=source.name,
            )
            return xu.UgridDataArray(
                regridder.regrid_dataarray(face_source, (facedim,)),
                regridder._target.ugrid_topology,
            )
        else:
            ugrid_source = xu.UgridDataArray.from_structured(source)
            kwargs["source"] = ugrid_source
            regridder = cls(**kwargs)
            return regridder.regrid(ugrid_source)

def raise_on_layer(value, variable: str):
    da = value[variable]
    if "layer" in da.dims:
        raise ValueError(f"{variable} should not be assigned a layer")
    return da

def finish(uda):
    """
    Set dimension order, and drop empty layers.
    """
    facedim = uda.ugrid.grid.face_dimension
    dims = ("time", "layer", facedim)
    return uda.transpose(*dims, missing_dims="ignore").dropna("layer", how="all")

def create_idomain(thickness):
    """
    Find cells that should get a passthrough value: IDOMAIN = -1.

    We find them by forward- and back-filling: if a cell is filled from both
    directions, it lies between active cells, and its value should be set
    to -1.
    """
    active = (
        thickness > 0
    ).compute()  # TODO: larger than a specific value for round-off?
    ones = xu.ones_like(active, dtype=float).where(active)
    passthrough = ones.ffill("layer").notnull() & ones.bfill("layer").notnull()
    idomain = ones.combine_first(
        xu.full_like(active, -1.0, dtype=float).where(passthrough)
    )
    return idomain.fillna(0).astype(int)
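
# Worked example for a single column with layer thicknesses [2.0, 0.0, 3.0]:
# the middle cell is inactive but lies between active cells, so ffill and
# bfill both reach it, and the resulting IDOMAIN column is [1, -1, 1].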

def create_disv(
    cache,
    top,
    bottom,
    ibound,
):
    if top.dims == ("layer",):
        if ibound.dims != ("layer", "y", "x"):
            raise ValueError(
                "Either ibound or top must have dimensions (layer, y, x) to "
                "derive model extent. Both may not be provided as constants."
            )
        top = top * xr.ones_like(ibound)
        original2d = ibound.isel(layer=0, drop=True)
    else:
        original2d = top.isel(layer=0, drop=True)

    if bottom.dims == ("layer",):
        bottom = bottom * xr.ones_like(ibound)

    top = top.compute()
    bottom = bottom.compute()
    disv_top = cache.regrid(top).compute()
    disv_bottom = cache.regrid(bottom).compute()
    thickness = disv_top - disv_bottom
    idomain = create_idomain(thickness)
    disv = imod.mf6.VerticesDiscretization(
        top=disv_top.sel(layer=1),
        bottom=disv_bottom,
        idomain=idomain,
    )
    active = idomain > 0
    return disv, disv_top, disv_bottom, active, original2d
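
# Note: MODFLOW 6 DISV takes a single 2D ``top`` (the top of layer 1) plus a
# 3D ``bottom``, which is why only ``disv_top.sel(layer=1)`` is passed above.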

def create_npf(
    cache,
    k,
    vertical_anisotropy,
    active,
    original2d,
):
    disv_k = cache.regrid(k, method="geometric_mean", original2d=original2d).where(
        active
    )
    k33 = k * vertical_anisotropy
    disv_k33 = cache.regrid(k33, method="harmonic_mean", original2d=original2d).where(
        active
    )
    return imod.mf6.NodePropertyFlow(
        icelltype=0,
        k=disv_k,
        k33=disv_k33,
    )
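
# A note on the averaging methods (an interpretation, not stated in the
# source): the geometric mean is a common choice for upscaling horizontal
# conductivity, and the harmonic mean for vertical conductivity; both are
# applied here during the horizontal regridding step.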

def create_chd(
    cache,
    model,
    key,
    value,
    ibound,
    active,
    original2d,
    repeat,
    **kwargs,
):
    head = value["head"]

    if "layer" in head.coords:
        layer = head.layer
        ibound = ibound.sel(layer=layer)
        active = active.sel(layer=layer)

    disv_head = cache.regrid(
        head,
        method="barycentric",
        original2d=original2d,
    )
    valid = (ibound < 0) & active

    if not valid.any():
        return

    chd = imod.mf6.ConstantHead(head=disv_head.where(valid))
    if repeat is not None:
        chd.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[key] = chd
    return
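
# Note: ``ibound < 0`` follows the iMOD/MODFLOW convention in which negative
# IBOUND values mark constant-head cells; only those cells receive a CHD entry.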

def create_drn(
    cache,
    model,
    key,
    value,
    active,
    top,
    bottom,
    original2d,
    repeat,
    **kwargs,
):
    conductance = raise_on_layer(value, "conductance")
    elevation = raise_on_layer(value, "elevation")

    disv_cond = cache.regrid(
        conductance,
        method="conductance",
        original2d=original2d,
    )
    disv_elev = cache.regrid(elevation, original2d=original2d)
    valid = (disv_cond > 0) & disv_elev.notnull() & active
    location = xu.ones_like(active, dtype=float)
    location = location.where((disv_elev > bottom) & (disv_elev <= top)).where(valid)
    disv_cond = finish(location * disv_cond)
    disv_elev = finish(location * disv_elev)

    if disv_cond.isnull().all():
        return

    drn = imod.mf6.Drainage(
        elevation=disv_elev,
        conductance=disv_cond,
    )
    if repeat is not None:
        drn.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[key] = drn
    return
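
# The drain is assigned to the layer whose vertical extent brackets the drain
# elevation (bottom < elevation <= top); cells failing that test or the
# validity test drop out as NaN and are removed by ``finish``.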

def create_ghb(
    cache,
    model,
    key,
    value,
    active,
    original2d,
    repeat,
    **kwargs,
):
    conductance = value["conductance"]
    head = value["head"]

    disv_cond = cache.regrid(
        conductance,
        method="conductance",
        original2d=original2d,
    )
    disv_head = cache.regrid(
        head,
        method="barycentric",
        original2d=original2d,
    )
    valid = (disv_cond > 0.0) & disv_head.notnull() & active

    ghb = imod.mf6.GeneralHeadBoundary(
        conductance=disv_cond.where(valid),
        head=disv_head.where(valid),
    )
    if repeat is not None:
        ghb.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[key] = ghb
    return

def create_riv(
    cache,
    model,
    key,
    value,
    active,
    original2d,
    top,
    bottom,
    repeat,
    **kwargs,
):
    def assign_to_layer(
        conductance, stage, elevation, infiltration_factor, top, bottom, active
    ):
        """
        Assign river boundary to multiple layers. Distribute the conductance
        based on the vertical degree of overlap.

        Parameters
        ----------
        conductance
        stage:
            water stage
        elevation:
            bottom elevation of the river
        infiltration_factor:
            factor (generally < 1) to reduce infiltration conductance compared
            to drainage conductance.
        top:
            layer model top elevation
        bottom:
            layer model bottom elevation
        active:
            active or inactive cells (idomain > 0)
        """
        valid = conductance > 0.0
        conductance = conductance.where(valid)
        stage = stage.where(valid)
        elevation = elevation.where(valid)
        elevation = elevation.where(elevation <= stage, other=stage)

        # TODO: this removes too much when the stage is higher than the top...
        # Instead: just cut through all layers until the bottom elevation.
        # Then, assign a transmissivity weighted conductance.
        water_top = stage.where(stage <= top)
        water_bottom = elevation.where(elevation > bottom)
        layer_height = top - bottom
        layer_height = layer_height.where(active)  # avoid 0 thickness layers
        fraction = (water_top - water_bottom) / layer_height
        # Set values of 0.0 to 1.0, but do not change NaN values:
        fraction = fraction.where(~(fraction == 0.0), other=1.0)
        location = xu.ones_like(fraction).where(fraction.notnull() & active)

        layered_conductance = finish(conductance * fraction)
        layered_stage = finish(stage * location)
        layered_elevation = finish(elevation * location)
        infiltration_factor = finish(infiltration_factor * location)

        return (
            layered_conductance,
            layered_stage,
            layered_elevation,
            infiltration_factor,
        )

    conductance = raise_on_layer(value, "conductance")
    stage = raise_on_layer(value, "stage")
    bottom_elevation = raise_on_layer(value, "bottom_elevation")
    infiltration_factor = raise_on_layer(value, "infiltration_factor")

    disv_cond_2d = cache.regrid(
        conductance,
        method="conductance",
        original2d=original2d,
    ).compute()
    disv_elev_2d = cache.regrid(bottom_elevation, original2d=original2d)
    disv_stage_2d = cache.regrid(stage, original2d=original2d)
    disv_inff_2d = cache.regrid(infiltration_factor, original2d=original2d)

    disv_cond, disv_stage, disv_elev, disv_inff = assign_to_layer(
        conductance=disv_cond_2d,
        stage=disv_stage_2d,
        elevation=disv_elev_2d,
        infiltration_factor=disv_inff_2d,
        top=top,
        bottom=bottom,
        active=active,
    )

    if disv_cond.isnull().all():
        return

    # The infiltration factor may be 0. In that case, we need only create a
    # DRN package.
    drn = imod.mf6.Drainage(
        conductance=(1.0 - disv_inff) * disv_cond,
        elevation=disv_stage,
    )
    if repeat is not None:
        drn.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[f"{key}-drn"] = drn

    riv_cond = disv_cond * disv_inff
    riv_valid = riv_cond > 0.0
    if not riv_valid.any():
        return

    riv = imod.mf6.River(
        stage=disv_stage.where(riv_valid),
        conductance=riv_cond.where(riv_valid),
        bottom_elevation=disv_elev.where(riv_valid),
    )
    if repeat is not None:
        riv.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[key] = riv

    return
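
# The infiltration factor f splits the river boundary over two packages: a
# DRN with conductance (1 - f) * C at the stage (drainage only), plus a RIV
# with conductance f * C. Drainage then still sees the full conductance C
# ((1 - f) * C + f * C), while infiltration is reduced by the factor f.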

def create_rch(
    cache,
    model,
    key,
    value,
    active,
    original2d,
    repeat,
    **kwargs,
):
    rate = raise_on_layer(value, "rate") * 0.001
    disv_rate = cache.regrid(rate, original2d=original2d).where(active)
    # Find highest active layer
    location = get_upper_active_grid_cells(active)
    disv_rate = finish(disv_rate.where(location))

    # Skip if there's no data
    if disv_rate.isnull().all():
        return

    rch = imod.mf6.Recharge(rate=disv_rate)
    if repeat is not None:
        rch.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[key] = rch
    return
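
# The 0.001 factor above (also in create_evt) presumably converts the
# project-file rates from mm/d to the m/d that MODFLOW 6 expects.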

def create_evt(
    cache,
    model,
    key,
    value,
    active,
    original2d,
    repeat,
    **kwargs,
):
    surface = raise_on_layer(value, "surface")
    rate = raise_on_layer(value, "rate") * 0.001
    depth = raise_on_layer(value, "depth")

    # Find highest active layer
    highest = active["layer"] == active["layer"].where(active).min()
    location = highest.where(highest)

    disv_surface = cache.regrid(surface, original2d=original2d).where(active)
    disv_surface = finish(disv_surface * location)

    disv_rate = cache.regrid(rate, original2d=original2d).where(active)
    disv_rate = finish(disv_rate * location)

    disv_depth = cache.regrid(depth, original2d=original2d).where(active)
    disv_depth = finish(disv_depth * location)

    # At depth 1.0, the rate is 0.0.
    proportion_depth = xu.ones_like(disv_surface).where(disv_surface.notnull())
    proportion_rate = xu.zeros_like(disv_surface).where(disv_surface.notnull())

    evt = imod.mf6.Evapotranspiration(
        surface=disv_surface,
        rate=disv_rate,
        depth=disv_depth,
        proportion_rate=proportion_rate,
        proportion_depth=proportion_depth,
    )
    if repeat is not None:
        evt.dataset["repeat_stress"] = get_repeat_stress(repeat)
    model[key] = evt
    return

def create_sto(
    cache,
    storage_coefficient,
    active,
    original2d,
    transient,
):
    if storage_coefficient is None:
        disv_coef = 0.0
    else:
        disv_coef = cache.regrid(storage_coefficient, original2d=original2d).where(
            active
        )

    sto = imod.mf6.StorageCoefficient(
        storage_coefficient=disv_coef,
        specific_yield=0.0,
        transient=transient,
        convertible=0,
    )
    return sto

def create_wel(
    cache,
    model,
    key,
    value,
    active,
    top,
    bottom,
    k,
    repeat,
    **kwargs,
):
    target = cache.target
    dataframe = value["dataframe"]
    layer = value["layer"]

    if layer <= 0:
        dataframe = imod.prepare.assign_wells(
            wells=dataframe,
            top=top,
            bottom=bottom,
            k=k,
            minimum_thickness=0.01,
            minimum_k=1.0,
        )
    else:
        dataframe["index"] = np.arange(len(dataframe))
        dataframe["layer"] = layer

    first = dataframe.groupby("index").first()
    well_layer = first["layer"].values
    xy = np.column_stack([first["x"], first["y"]])
    cell2d = target.locate_points(xy)
    valid = (cell2d >= 0) & active.values[well_layer - 1, cell2d]

    cell2d = cell2d[valid] + 1
    # Skip if no wells are located inside cells
    if not valid.any():
        return

    if "time" in dataframe.columns:
        # Ensure the well data is rectangular: reindex every well to the
        # union of all times. First ffill, then bfill! Forward-filling
        # propagates each well's last known rate; back-filling then only
        # fills the leading entries before a well's first record.
        time = np.unique(dataframe["time"].values)
        dataframe = dataframe.set_index("time")
        dfs = [df.reindex(time).ffill().bfill() for _, df in dataframe.groupby("index")]
        rate = (
            pd.concat(dfs)
            .reset_index()
            .set_index(["time", "index"])["rate"]
            .to_xarray()
        )
    else:
        rate = xr.DataArray(
            dataframe["rate"], coords={"index": dataframe["index"]}, dims=["index"]
        )

    # Don't forget to remove the out-of-bounds points.
    rate = rate.where(xr.DataArray(valid, dims=["index"]), drop=True)

    wel = imod.mf6.WellDisVertices(
        layer=well_layer,
        cell2d=cell2d,
        rate=rate,
    )
    if repeat is not None:
        wel.dataset["repeat_stress"] = get_repeat_stress(repeat)

    model[key] = wel
    return

def create_ic(cache, model, key, value, active, **kwargs):
    start = value["head"]
    disv_start = cache.regrid(source=start, method="barycentric").where(active)
    model[key] = imod.mf6.InitialConditions(start=disv_start)
    return

def create_hfb(
    model: Modflow6Model,
    key: str,
    value: Dict,
    top: GridDataArray,
    bottom: GridDataArray,
    **kwargs,
) -> None:
    dataframe = value["geodataframe"]

    barrier_gdf = gpd.GeoDataFrame(
        geometry=dataframe["geometry"].values,
        data={
            "resistance": dataframe["resistance"].values,
            "ztop": np.ones_like(dataframe["geometry"].values) * top.max().values,
            "zbottom": np.ones_like(dataframe["geometry"].values) * bottom.min().values,
        },
    )

    model[key] = imod.mf6.HorizontalFlowBarrierResistance(barrier_gdf)

def merge_hfbs(
    horizontal_flow_barriers: List[imod.mf6.HorizontalFlowBarrierResistance],
):
    datasets = []
    for horizontal_flow_barrier in horizontal_flow_barriers:
        datasets.append(horizontal_flow_barrier.dataset)

    combined_dataset = xr.concat(datasets, "index")
    combined_dataset.coords["index"] = np.arange(combined_dataset.sizes["index"])

    combined_dataframe = cast(gpd.GeoDataFrame, combined_dataset.to_dataframe())
    combined_dataframe.drop("print_input", axis=1, inplace=True)

    return imod.mf6.HorizontalFlowBarrierResistance(combined_dataframe)

PKG_CONVERSION = {
    "chd": create_chd,
    "drn": create_drn,
    "evt": create_evt,
    "ghb": create_ghb,
    "hfb": create_hfb,
    "shd": create_ic,
    "rch": create_rch,
    "riv": create_riv,
    "wel": create_wel,
}
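
# Note: the project-file "shd" (starting head) topic maps onto the MODFLOW 6
# initial conditions package via ``create_ic``.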

def expand_repetitions(
    repeat_stress: List[datetime], time_min: datetime, time_max: datetime
) -> Dict[datetime, datetime]:
    expanded = {}
    for year, date in itertools.product(
        range(time_min.year, time_max.year + 1),
        repeat_stress,
    ):
        newdate = date.replace(year=year)
        if newdate < time_max:
            expanded[newdate] = date
    return expanded
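
# Worked example: with repeat_stress = [datetime(2000, 4, 1)],
# time_min = datetime(2001, 1, 1) and time_max = datetime(2003, 1, 1), the
# result is {datetime(2001, 4, 1): datetime(2000, 4, 1),
# datetime(2002, 4, 1): datetime(2000, 4, 1)}; 2003-04-01 falls after
# time_max and is dropped.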

def convert_to_disv(
    projectfile_data, target, time_min=None, time_max=None, repeat_stress=None
):
    """
    Convert the contents of a project file to a MODFLOW6 DISV model.

    ``time_min`` and ``time_max`` are **both** required when ``repeat_stress``
    is given. The entries in the Periods section of the project file will be
    expanded to yearly repeats between ``time_min`` and ``time_max``.

    Additionally, ``time_min`` and ``time_max`` may be used to slice the input
    to a specific time domain.

    The returned model is steady-state if none of the packages contain a time
    dimension, and transient if any package does. This can be changed by
    setting the "transient" value in the storage package of the returned
    model. Storage coefficient input is required for a transient model.

    Parameters
    ----------
    projectfile_data: dict
        Dictionary with the projectfile topics as keys, and the data as
        xarray.DataArray, pandas.DataFrame, or geopandas.GeoDataFrame.
    target: xu.Ugrid2d
        The unstructured target topology. All data is transformed to match
        this topology.
    time_min: datetime, optional
        Minimum starting time of a stress.
        Required when ``repeat_stress`` is provided.
    time_max: datetime, optional
        Maximum starting time of a stress.
        Required when ``repeat_stress`` is provided.
    repeat_stress: dict of dict of string to datetime, optional
        This dict contains, per topic, a mapping of period alias (a string)
        to its datetime.

    Returns
    -------
    disv_model: imod.mf6.GroundwaterFlowModel
    """
    if repeat_stress is not None:
        if time_min is None or time_max is None:
            raise ValueError(
                "time_min and time_max are required when repeat_stress is given"
            )

    for arg in (time_min, time_max):
        if arg is not None and not isinstance(arg, datetime):
            raise TypeError(
                "time_min and time_max must be datetime.datetime. "
                f"Received: {type(arg).__name__}"
            )

    data = projectfile_data.copy()
    model = imod.mf6.GroundwaterFlowModel()

    # Setup the regridding weights cache.
    weights_cache = SingularTargetRegridderWeightsCache(data, target, cache_size=5)

    # Mandatory packages first.
    ibound = data["bnd"]["ibound"].compute()
    disv, top, bottom, active, original2d = create_disv(
        cache=weights_cache,
        top=data["top"]["top"],
        bottom=data["bot"]["bottom"],
        ibound=ibound,
    )

    npf = create_npf(
        cache=weights_cache,
        k=data["khv"]["kh"],
        vertical_anisotropy=data["kva"]["vertical_anisotropy"],
        active=active,
        original2d=original2d,
    )

    model["npf"] = npf
    model["disv"] = disv
    model["oc"] = imod.mf6.OutputControl(save_head="all")

    # Used in other package construction:
    k = npf["k"].compute()
    new_ibound = weights_cache.regrid(source=ibound, method="minimum").compute()

    # Boundary conditions, one by one.
    for key, value in data.items():
        pkg = key.split("-")[0]
        convert = PKG_CONVERSION.get(pkg)
        # Skip unsupported packages.
        if convert is None:
            continue

        if repeat_stress is None:
            repeat = None
        else:
            repeat = repeat_stress.get(key)
            if repeat is not None:
                repeat = expand_repetitions(repeat, time_min, time_max)

        try:
            # Conversion will update the model instance.
            convert(
                cache=weights_cache,
                model=model,
                key=key,
                value=value,
                ibound=new_ibound,
                active=active,
                original2d=original2d,
                top=top,
                bottom=bottom,
                k=k,
                repeat=repeat,
            )
        except Exception as e:
            raise type(e)(f"{e}\nduring conversion of {key}")

    # Treat hfb's separately: they must be merged into one,
    # as MODFLOW6 only supports a single HFB.
    hfb_keys = [key for key in model.keys() if key.split("-")[0] == "hfb"]
    hfbs = [model.pop(key) for key in hfb_keys]
    if hfbs:
        model["hfb"] = merge_hfbs(hfbs)

    transient = any("time" in pkg.dataset.dims for pkg in model.values())
    if transient and (time_min is not None or time_max is not None):
        model = model.clip_box(time_min=time_min, time_max=time_max)

    sto_entry = data.get("sto")
    if sto_entry is None:
        if transient:
            raise ValueError("storage input is required for a transient run")
        storage_coefficient = None
    else:
        storage_coefficient = sto_entry["storage_coefficient"]

    model["sto"] = create_sto(
        cache=weights_cache,
        storage_coefficient=storage_coefficient,
        active=active,
        original2d=original2d,
        transient=transient,
    )

    return model
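
# Usage sketch (illustrative; assumes ``data`` is the parsed project-file
# dictionary and ``grid`` is an existing xu.Ugrid2d target topology):
#
#   from datetime import datetime
#
#   gwf = convert_to_disv(
#       data,
#       grid,
#       time_min=datetime(2000, 1, 1),
#       time_max=datetime(2010, 1, 1),
#   )
#   simulation = imod.mf6.Modflow6Simulation("example")
#   simulation["gwf"] = gwf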