Merging subdomain dfsu files
During a simulation, MIKE will commonly split the domain into subdomains and output the results of each subdomain to a separate dfsu file with a _p# suffix. This script merges dfsu files of this type into a single file.
Note: The implementation below assumes 2D dfsu files. For 3D dfsu files, the script needs to be modified accordingly (see the optional check after the file inspection below).
Import libraries
import mikeio
import numpy as np
from mikeio.spatial import GeometryFM2D
# (optional) check first file, items etc.
mikeio.open("../../data/SimA_HD_p0.dfsu")
<mikeio.Dfsu2DH>
number of elements: 194
number of nodes: 120
projection: PROJCS["UTM-32",GEOGCS["Unused",DATUM["UTM Projections",SPHEROID["WGS 1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.0174532925199433]],PROJECTION["Transverse_Mercator"],PARAMETER["False_Easting",500000],PARAMETER["False_Northing",0],PARAMETER["Central_Meridian",9],PARAMETER["Scale_Factor",0.9996],PARAMETER["Latitude_Of_Origin",0],UNIT["Meter",1]]
items:
0: Surface elevation <Surface Elevation> (meter)
1: Current speed <Current Speed> (meter per sec)
2: Current direction <Current Direction> (radian)
time: 2014-01-01 00:00:00 - 2014-01-01 10:00:00 (3 records)
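As noted above, this recipe only handles 2D dfsu files. Before merging, you may want to confirm this; the following is a minimal sketch that checks the geometry type of the opened file (the variable name dfs0 is only illustrative):
# (optional) confirm the input is 2D before merging; 3D files need a modified script
dfs0 = mikeio.open("../../data/SimA_HD_p0.dfsu")
assert isinstance(dfs0.geometry, GeometryFM2D), "expected a 2D dfsu file"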
Choose items to process
# choose items to process (when in doubt look at one of the files you want to process with mikeio.open)
= ["Surface elevation", "Current speed", "Current direction"] items
Read files
Option A: automatically find all files with the _p# suffix
import glob
import os
= "../../data/SimA_HD" # basename of the dfsu files
basename
def find_dfsu_files(basename):
= f"{basename}_p*.dfsu"
pattern = sorted(glob.glob(pattern))
files if not files:
raise ValueError(f"No files found matching the pattern: {pattern}")
return files
= find_dfsu_files(basename)
dfs_files print(f"Found {len(dfs_files)} files:")
for file in dfs_files:
print(f" - {os.path.basename(file)}")
= [mikeio.read(file, items=items) for file in dfs_files] dfs_list
Found 4 files:
- SimA_HD_p0.dfsu
- SimA_HD_p1.dfsu
- SimA_HD_p2.dfsu
- SimA_HD_p3.dfsu
Option B: manually select files
# List of input dfsu files
dfs_files = [
    "../../data/SimA_HD_p0.dfsu",
    "../../data/SimA_HD_p1.dfsu",
    "../../data/SimA_HD_p2.dfsu",
    "../../data/SimA_HD_p3.dfsu",
]

# read all dfsu files
dfs_list = [mikeio.read(file, items=items) for file in dfs_files]
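Whichever option is used, a quick consistency check can catch files that do not belong to the same simulation. A minimal sketch, assuming all subdomain files should share the same time axis and item list:
# (optional) sanity check: all subdomain files should share time axis and items
for dfs in dfs_list[1:]:
    assert (dfs.time == dfs_list[0].time).all(), "time axes differ between subdomain files"
    assert [it.name for it in dfs.items] == [it.name for it in dfs_list[0].items], "items differ"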
Extract data of all subdomains
# Create a dictionary to store data for each item
data_dict = {item: [] for item in items}

# Get time steps (assuming all files have the same time steps)
time_steps = dfs_list[0][items[0]].time

# loop over items and time steps and concatenate data from all subdomains
for item in items:
    for i in range(len(time_steps)):
        # Extract and combine data for the current time step from all subdomains
        combined_data = np.concatenate([dfs[item].values[i, :] for dfs in dfs_list])
        data_dict[item].append(combined_data)

    # Convert the list to a numpy array
    data_dict[item] = np.array(data_dict[item])

# Prepare Merged Data
merged_data = np.array([data_dict[item] for item in items])
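The resulting merged_data array should have shape (n_items, n_timesteps, n_elements), where the element count is the sum over all subdomains. A minimal sketch of that check:
# (optional) shape check: (n_items, n_timesteps, total number of elements)
n_elements_total = sum(dfs.geometry.n_elements for dfs in dfs_list)
assert merged_data.shape == (len(items), len(time_steps), n_elements_total)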
Merge geometry of all subdomains
geometries = [dfs.geometry for dfs in dfs_list]

combined_node_coordinates = []
combined_element_table = []
node_offset = 0

# loop through geometries to combine nodes and elements of all subdomains
for geom in geometries:
    current_node_coordinates = geom.node_coordinates
    current_element_table = geom.element_table

    combined_node_coordinates.extend(current_node_coordinates)
    adjusted_element_table = [element + node_offset for element in current_element_table]
    combined_element_table.extend(adjusted_element_table)

    node_offset += len(current_node_coordinates)

combined_node_coordinates = np.array(combined_node_coordinates)
combined_element_table = np.array(combined_element_table, dtype=object)
projection = geometries[0]._projstr

# create combined geometry
combined_geometry = GeometryFM2D(
    node_coordinates=combined_node_coordinates,
    element_table=combined_element_table,
    projection=projection,
)

combined_geometry.plot()
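Since the loop above simply stacks nodes and elements without deduplication, the combined geometry should contain exactly the sum of the subdomain node and element counts. A minimal sketch of that check:
# (optional) the combined geometry should contain all subdomain nodes and elements
assert combined_geometry.n_nodes == sum(g.n_nodes for g in geometries)
assert combined_geometry.n_elements == sum(g.n_elements for g in geometries)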
Merge everything into dataset
ds_out = mikeio.Dataset(
    data=merged_data,  # n_items, n_timesteps, n_elements
    items=items,
    time=time_steps,
    geometry=combined_geometry,
)

ds_out[items[0]].sel(time=1).plot()  # plot time step index 1 of the first item
Write output to single file
= "area_merged.dfsu"
output_file ds_out.to_dfs(output_file)
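To verify the result, the merged file can be read back and inspected; a minimal sketch:
# (optional) read the merged file back and inspect it
ds_check = mikeio.read(output_file)
print(ds_check)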