From 7ff848c54ba29266b8a745cc604550ab6d1daa42 Mon Sep 17 00:00:00 2001 From: FaugereE Date: Mon, 9 Mar 2026 09:57:18 +0100 Subject: [PATCH 1/3] [EJP] delft3d Wrapper cleaning --- .../wrappers/delft3d/delft3d_example.py | 0 bluemath_tk/wrappers/delft3d/delft3d_utils.py | 324 +++++++++++++ .../wrappers/delft3d/delft3d_wrapper.py | 444 +----------------- 3 files changed, 335 insertions(+), 433 deletions(-) mode change 100755 => 100644 bluemath_tk/wrappers/delft3d/delft3d_example.py create mode 100644 bluemath_tk/wrappers/delft3d/delft3d_utils.py diff --git a/bluemath_tk/wrappers/delft3d/delft3d_example.py b/bluemath_tk/wrappers/delft3d/delft3d_example.py old mode 100755 new mode 100644 diff --git a/bluemath_tk/wrappers/delft3d/delft3d_utils.py b/bluemath_tk/wrappers/delft3d/delft3d_utils.py new file mode 100644 index 0000000..43f4f2c --- /dev/null +++ b/bluemath_tk/wrappers/delft3d/delft3d_utils.py @@ -0,0 +1,324 @@ +import os +import os.path as op +from typing import Union + +import numpy as np +import pandas as pd +import xarray as xr + +from ...core.operations import nautical_to_mathematical +from ...additive.additive import ( + create_triangle_mask_from_points, + generate_structured_points_vectorized, + point_to_segment_distance_vectorized, +) + +sbatch_file_greensurge = """#!/bin/bash +#SBATCH --ntasks=1 # Number of tasks (MPI processes) +#SBATCH --partition=geocean # Standard output and error log +#SBATCH --nodes=1 # Number of nodes to use +#SBATCH --mem=4gb # Memory per node in GB (see also --mem-per-cpu) +#SBATCH --time=24:00:00 + +source /nfs/home/geocean/faugeree/miniforge3/etc/profile.d/conda.sh +conda activate work + +case_dir=$(ls | awk "NR == $SLURM_ARRAY_TASK_ID") +launchDelft3dcomp.sh --case-dir $case_dir + +output_file="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.nc" +output_file_raw="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.raw" + +python3 - < 0.01 else "0" for x in line) for line in mat + ) + +def format_zeros(mat_shape): + return 
"\n".join("0 " * mat_shape[1] for _ in range(mat_shape[0])) \ No newline at end of file diff --git a/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py b/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py index 2f2e611..4196efd 100644 --- a/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py +++ b/bluemath_tk/wrappers/delft3d/delft3d_wrapper.py @@ -7,54 +7,18 @@ import pandas as pd import xarray as xr -from ...additive.greensurge import ( - actualize_grid_info, - create_triangle_mask_from_points, - generate_structured_points_vectorized, +from ...additive.additive import ( get_regular_grid, - point_to_segment_distance_vectorized, ) -from ...core.operations import nautical_to_mathematical + from ...tcs.vortex import vortex2delft_3D_FM_nc from .._base_wrappers import BaseModelWrapper -sbatch_file_example = """#!/bin/bash -#SBATCH --ntasks=1 # Number of tasks (MPI processes) -#SBATCH --partition=geocean # Standard output and error log -#SBATCH --nodes=1 # Number of nodes to use -#SBATCH --mem=4gb # Memory per node in GB (see also --mem-per-cpu) -#SBATCH --time=24:00:00 - -source /nfs/home/geocean/faugeree/miniforge3/etc/profile.d/conda.sh -conda activate work - -case_dir=$(ls | awk "NR == $SLURM_ARRAY_TASK_ID") -launchDelft3dcomp.sh --case-dir $case_dir - -output_file="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.nc" -output_file_raw="${case_dir}/dflowfmoutput/GreenSurge_GFDcase_map.raw" - -python3 - < 0.01 else "0" for x in line) for line in mat - ) - - -def format_zeros(mat_shape): - return "\n".join("0 " * mat_shape[1] for _ in range(mat_shape[0])) - - class GreenSurgeModelWrapper(Delft3dModelWrapper): """ Wrapper for the Delft3d model for Greensurge. """ - def generate_grid_forcing_file_D3DFM( - self, - case_context: dict, - case_dir: str, - ds_GFD_info: xr.Dataset, - ): - """ - Generate the wind files for a case. - - Parameters - ---------- - case_context : dict - The case context. - case_dir : str - The case directory. 
- ds_GFD_info : xr.Dataset - The dataset with the GFD information. - """ - dir_steps = case_context.get("dir_steps") - real_dirs = np.linspace(0, 360, dir_steps + 1)[:-1] - i_tes = case_context.get("tesela") - i_dir = case_context.get("direction") - real_dir = real_dirs[i_dir] - dt_forz = case_context.get("dt_forz") - wind_magnitude = case_context.get("wind_magnitude") - simul_time = case_context.get("simul_time") - - node_triangle = ds_GFD_info.triangle_forcing_connectivity.isel( - element_forcing_index=i_tes - ) - lon_teselas = ds_GFD_info.node_forcing_longitude.isel( - node_forcing_index=node_triangle - ).values - lat_teselas = ds_GFD_info.node_forcing_latitude.isel( - node_forcing_index=node_triangle - ).values - - lon_grid = ds_GFD_info.lon_grid.values - lat_grid = ds_GFD_info.lat_grid.values - - x_llcenter = lon_grid[0] - y_llcenter = lat_grid[0] - - n_cols = len(lon_grid) - n_rows = len(lat_grid) - - dx = (lon_grid[-1] - lon_grid[0]) / n_cols - dy = (lat_grid[-1] - lat_grid[0]) / n_rows - X0, X1, X2 = lon_teselas - Y0, Y1, Y2 = lat_teselas - - triangle = [(X0, Y0), (X1, Y1), (X2, Y2)] - mask = create_triangle_mask_from_points(lon_grid, lat_grid, triangle).astype( - int - ) - mask_int = np.flip(mask, axis=0) # Flip to match grid orientation - - u = -np.cos(nautical_to_mathematical(real_dir) * np.pi / 180) * wind_magnitude - v = -np.sin(nautical_to_mathematical(real_dir) * np.pi / 180) * wind_magnitude - u_mat = mask_int * u - v_mat = mask_int * v - - self.logger.info( - f"Creating cell {i_tes} direction {int(real_dir)} with u = {u} and v = {v}" - ) - - file_name_u = op.join(case_dir, "GFD_wind_file.amu") - file_name_v = op.join(case_dir, "GFD_wind_file.amv") - - with open(file_name_u, "w+") as fu, open(file_name_v, "w+") as fv: - fu.write( - "### START OF HEADER\n" - + "### This file is created by Deltares\n" - + "### Additional commments\n" - + "FileVersion = 1.03\n" - + "filetype = meteo_on_equidistant_grid\n" - + "NODATA_value = -9999.0\n" - + f"n_cols = 
{n_cols}\n" - + f"n_rows = {n_rows}\n" - + "grid_unit = degree\n" - + f"x_llcenter = {x_llcenter}\n" - + f"y_llcenter = {y_llcenter}\n" - + f"dx = {dx}\n" - + f"dy = {dy}\n" - + "n_quantity = 1\n" - + "quantity1 = x_wind\n" - + "unit1 = m s-1\n" - + "### END OF HEADER\n" - ) - fv.write( - "### START OF HEADER\n" - + "### This file is created by Deltares\n" - + "### Additional commments\n" - + "FileVersion = 1.03\n" - + "filetype = meteo_on_equidistant_grid\n" - + "NODATA_value = -9999.0\n" - + f"n_cols = {n_cols}\n" - + f"n_rows = {n_rows}\n" - + "grid_unit = degree\n" - + f"x_llcenter = {x_llcenter}\n" - + f"y_llcenter = {y_llcenter}\n" - + f"dx = {dx}\n" - + f"dy = {dy}\n" - + "n_quantity = 1\n" - + "quantity1 = y_wind\n" - + "unit1 = m s-1\n" - + "### END OF HEADER\n" - ) - for time in range(4): - if time == 0: - time_real = time - elif time == 1: - time_real = dt_forz - elif time == 2: - time_real = dt_forz + 0.01 - elif time == 3: - time_real = simul_time - fu.write(f"TIME = {time_real} hours since 2022-01-01 00:00:00 +00:00\n") - fv.write(f"TIME = {time_real} hours since 2022-01-01 00:00:00 +00:00\n") - if time in [0, 1]: - fu.write(format_matrix(u_mat) + "\n") - fv.write(format_matrix(v_mat) + "\n") - else: - fu.write(format_zeros(u_mat.shape) + "\n") - fv.write(format_zeros(v_mat.shape) + "\n") - - def generate_grid_forcing_file_netCDF_D3DFM( - self, - case_context: dict, - case_dir: str, - ds_GFD_info: xr.Dataset, - ): - """ - Generate the wind forcing files for a case in netCDF format (optimized version). 
- """ - triangle_index = case_context.get("tesela") - direction_index = case_context.get("direction") - wind_direction = ds_GFD_info.wind_directions.values[direction_index] - wind_speed = case_context.get("wind_magnitude") - - connectivity = ds_GFD_info.triangle_forcing_connectivity - triangle_longitude = ds_GFD_info.node_forcing_longitude.isel( - node_forcing_index=connectivity - ).values - triangle_latitude = ds_GFD_info.node_forcing_latitude.isel( - node_forcing_index=connectivity - ).values - - connectivity_compo = ds_GFD_info.triangle_computation_connectivity.values - node_lon = ds_GFD_info.node_computation_longitude.values.ravel() - node_lat = ds_GFD_info.node_computation_latitude.values.ravel() - - # Selection triangle vertices - x0, x1, x2 = triangle_longitude[triangle_index, :] - y0, y1, y2 = triangle_latitude[triangle_index, :] - triangle_vertices = np.array([(x0, y0), (x1, y1), (x2, y2)], dtype=float) - - # Compute centroid with NumPy (avoids shapely dependency) - centroid = triangle_vertices.mean(axis=0) - scale_factor = 1.001 - verts_buffered = centroid + (triangle_vertices - centroid) * scale_factor - - # Tolerance based on edge size - edge_lengths = np.linalg.norm( - np.roll(triangle_vertices, -1, axis=0) - triangle_vertices, axis=1 - ) - tol = np.mean(edge_lengths) * 0.001 - - # Vectorized distance to 3 edges (~100x faster than shapely loop) - dist_edge_01 = point_to_segment_distance_vectorized( - node_lon, node_lat, x0, y0, x1, y1 - ) - dist_edge_12 = point_to_segment_distance_vectorized( - node_lon, node_lat, x1, y1, x2, y2 - ) - dist_edge_20 = point_to_segment_distance_vectorized( - node_lon, node_lat, x2, y2, x0, y0 - ) - dist_to_boundary = np.minimum( - np.minimum(dist_edge_01, dist_edge_12), dist_edge_20 - ) - - # Points on the boundary - mask_on_edge = dist_to_boundary < tol - - # Triangles with at least one node on the boundary - mask_tri_to_refine = np.any(mask_on_edge[connectivity_compo], axis=1) - - # Vectorized version of 
generate_structured_points - lon_structured, lat_structured = generate_structured_points_vectorized( - connectivity_compo[mask_tri_to_refine], - node_lon, - node_lat, - ) - - # Concatenate points - longitude_points_computation = np.concatenate( - [node_lon, lon_structured.ravel()] - ) - latitude_points_computation = np.concatenate([node_lat, lat_structured.ravel()]) - - # Triangle mask (uses matplotlib Path) - triangle_mask = create_triangle_mask_from_points( - longitude_points_computation, - latitude_points_computation, - verts_buffered, - ) - - # Wind computation - angle_rad = nautical_to_mathematical(wind_direction) * np.pi / 180 - wind_u = -np.cos(angle_rad) * wind_speed - wind_v = -np.sin(angle_rad) * wind_speed - - # Initialize and assign wind arrays - n_points = len(longitude_points_computation) - windx = np.zeros((4, n_points)) - windy = np.zeros((4, n_points)) - windx[:2, triangle_mask] = wind_u - windy[:2, triangle_mask] = wind_v - - # Build forcing dataset - ds_forcing = xr.Dataset( - { - "time": ds_GFD_info["time_forcing_index"], - "node": xr.DataArray(np.arange(n_points), dims=["node"]), - "longitude": xr.DataArray( - longitude_points_computation, - dims=["node"], - attrs={ - "description": "Longitude of each mesh node of the computational grid", - "standard_name": "longitude", - "long_name": "longitude", - "units": "degrees_east", - }, - ), - "latitude": xr.DataArray( - latitude_points_computation, - dims=["node"], - attrs={ - "description": "Latitude of each mesh node of the computational grid", - "standard_name": "latitude", - "long_name": "latitude", - "units": "degrees_north", - }, - ), - "windx": xr.DataArray( - windx, - dims=["time", "node"], - attrs={ - "coordinates": "time node", - "long_name": "Wind speed in x direction", - "standard_name": "windx", - "units": "m s-1", - }, - ), - "windy": xr.DataArray( - windy, - dims=["time", "node"], - attrs={ - "coordinates": "time node", - "long_name": "Wind speed in y direction", - "standard_name": 
"windy", - "units": "m s-1", - }, - ), - } - ) - - ds_forcing.to_netcdf(op.join(case_dir, "forcing.nc")) - - self.logger.info( - f"Creating triangle {triangle_index} direction {int(wind_direction)} with u = {wind_u} and v = {wind_v}" - ) - def build_case( self, case_context: dict, @@ -489,7 +169,7 @@ def build_case( """ if case_context.get("SetupType") == "GreenSurge": if case_context.get("forcing_type") == "netCDF": - self.generate_grid_forcing_file_netCDF_D3DFM( + generate_grid_forcing_file_netCDF_D3DFM( case_context=case_context, case_dir=case_dir, ds_GFD_info=case_context.get("ds_GFD_info"), @@ -506,7 +186,7 @@ def build_case( self.ds_GFD_info["lon_grid"] = np.flip(lon_grid) self.ds_GFD_info["lat_grid"] = lat_grid - self.generate_grid_forcing_file_D3DFM( + generate_grid_forcing_file_D3DFM( case_context=case_context, case_dir=case_dir, ds_GFD_info=self.ds_GFD_info, @@ -515,106 +195,4 @@ def build_case( mesh = xr.open_dataset(case_context.get("mesh_path")) vortex = case_context.get("vortex") forcing = vortex2delft_3D_FM_nc(mesh, vortex) - forcing.to_netcdf(op.join(case_dir, "forcing.nc")) - - def postprocess_case(self, case_dir: str) -> None: - """ - Postprocess the case output file. - - Parameters - ---------- - case_dir : str - The case directory. 
- """ - - output_file = op.join(case_dir, "dflowfmoutput/GreenSurge_GFDcase_map.nc") - output_file_compressed = op.join( - case_dir, "dflowfmoutput/GreenSurge_GFDcase_map_compressed.nc" - ) - output_file_compressed_tmp = op.join( - case_dir, "dflowfmoutput/GreenSurge_GFDcase_map_compressed_tmp.nc" - ) - if case_dir == self.output_dir[0]: - # If the case_dir is the output_dir, we do not remove the original file - postprocess_command = f""" - ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "{output_file}" "{output_file_compressed_tmp}" - ncks -4 -L 4 "{output_file_compressed_tmp}" "{output_file_compressed}" - rm "{output_file_compressed_tmp}" - """ - else: - postprocess_command = f""" - ncap2 -s 'mesh2d_s1=float(mesh2d_s1)' -v -O "{output_file}" "{output_file_compressed_tmp}" - ncks -4 -L 4 "{output_file_compressed_tmp}" "{output_file_compressed}" - rm "{output_file_compressed_tmp}" - rm "{output_file}" - """ - - self._exec_bash_commands( - str_cmd=postprocess_command, - cwd=case_dir, - ) - - def postprocess_cases(self, ds_GFD_info: xr.Dataset, parallel: bool = False): - """ - Postprocess the cases output files. - - Parameters - ---------- - ds_GFD_info : xr.Dataset - The dataset with the GFD information. - parallel : bool, optional - Whether to run the postprocessing in parallel. Default is False. - """ - - if ( - self.monitor_cases( - dia_file_name="dflowfmoutput/GreenSurge_GFDcase.dia", - value_counts="percentage", - ) - .loc["FINISHED"] - .values - != 100.0 - ): - raise ValueError( - "Not all cases are finished. Please check the status of the cases." 
- ) - - path_computation = op.join( - self.cases_dirs[0], "dflowfmoutput/GreenSurge_GFDcase_map.nc" - ) - ds_GFD_info = actualize_grid_info(path_computation, ds_GFD_info) - dirname, basename = os.path.split(ds_GFD_info.attrs["source"]) - name, ext = os.path.splitext(basename) - new_filepath = os.path.join(dirname, f"{name}_updated{ext}") - ds_GFD_info.to_netcdf(new_filepath) - - case_ext = "dflowfmoutput/GreenSurge_GFDcase_map.nc" - - def preprocess(dataset): - file_name = dataset.encoding.get("source", None) - dir_i = int(file_name.split("_D_")[-1].split("/")[0]) - tes_i = int(file_name.split("_T_")[-1].split("_D_")[0]) - dataset = ( - dataset[["mesh2d_s1"]] - .expand_dims(["forcing_cell"]) - .assign_coords(forcing_cell=[tes_i]) - ) - self.logger.info( - f"Loaded {file_name} with forcing_cell={tes_i} and dir={dir_i}" - ) - return dataset - - folder_postprocess = op.join(self.output_dir, "GreenSurge_Postprocess") - os.makedirs(folder_postprocess, exist_ok=True) - - dir_steps = self.fixed_parameters["dir_steps"] - for idx in range(dir_steps): - paths = self.cases_dirs[idx::dir_steps] - file_paths = [op.join(case_dir, case_ext) for case_dir in paths] - DS = xr.open_mfdataset( - file_paths, - parallel=parallel, - combine="by_coords", - preprocess=preprocess, - ) - DS.load().to_netcdf(op.join(folder_postprocess, f"GreenSurge_DB_{idx}.nc")) + forcing.to_netcdf(op.join(case_dir, "forcing.nc")) \ No newline at end of file From e26409846cdcb8179dc0a26e552f0bc80d92d758 Mon Sep 17 00:00:00 2001 From: FaugereE Date: Mon, 9 Mar 2026 09:58:17 +0100 Subject: [PATCH 2/3] [EJP] add of GreenWavers wrapper --- bluemath_tk/wrappers/swan/swan_utils.py | 312 ++++++++++++++++++++++ bluemath_tk/wrappers/swan/swan_wrapper.py | 29 ++ 2 files changed, 341 insertions(+) create mode 100644 bluemath_tk/wrappers/swan/swan_utils.py diff --git a/bluemath_tk/wrappers/swan/swan_utils.py b/bluemath_tk/wrappers/swan/swan_utils.py new file mode 100644 index 0000000..b0d5320 --- /dev/null +++ 
b/bluemath_tk/wrappers/swan/swan_utils.py @@ -0,0 +1,312 @@ +import os +import os.path as op +from typing import Union + +import numpy as np +import pandas as pd +import xarray as xr + +from scipy.io import loadmat, whosmat +import pandas as pd +import warnings + +from ...core.operations import nautical_to_mathematical +from ...additive.additive import ( + create_triangle_mask_from_points, + read_adcirc_grd, +) + +sbatch_file_greenwaves = """#!/bin/bash +#SBATCH --ntasks=1 # Number of tasks (MPI processes) +#SBATCH --partition=geocean # Standard output and error log +#SBATCH --nodes=1 # Number of nodes to use +#SBATCH --mem=4gb # Memory per node in GB (see also --mem-per-cpu) +#SBATCH --time=24:00:00 + +source /nfs/home/geocean/faugeree/miniforge3/etc/profile.d/conda.sh +conda activate work + +case_dir=$(ls | awk "NR == $SLURM_ARRAY_TASK_ID") +launchSwan.sh --case-dir $case_dir + +output_file="${case_dir}/output.mat" +output_file_raw="${case_dir}/output.raw" + +python3 - <= 3: + varname = "_".join(parts[:-2]) + timestamp_str = f"{parts[-2]}_{parts[-1]}" + try: + timestamp = pd.to_datetime(timestamp_str, format="%Y%m%d_%H%M%S") + vars_with_keys.setdefault(varname, []).append((key, timestamp)) + except ValueError: + continue + + if variables is not None: + vars_with_keys = {k: v for k, v in vars_with_keys.items() if k in variables} + + if not vars_with_keys: + raise ValueError(f"No matching variables found. 
Available: {list(vars_with_keys.keys())}") + + if time_range is not None: + t_start, t_end = pd.to_datetime(time_range[0]), pd.to_datetime(time_range[1]) + for varname in vars_with_keys: + vars_with_keys[varname] = [ + (k, t) for k, t in vars_with_keys[varname] + if t_start <= t <= t_end + ] + + keys_to_load = {"Xp", "Yp"} + for varname, key_times in vars_with_keys.items(): + keys_to_load.update(k for k, t in key_times) + + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", message="Duplicate variable name") + mat = loadmat(path, variable_names=list(keys_to_load)) + + lon = mat["Xp"].ravel().astype(np.float32) + lat = mat["Yp"].ravel().astype(np.float32) + + ds_vars = {} + + for varname, key_times in vars_with_keys.items(): + key_times.sort(key=lambda x: x[1]) # Sort by timestamp + + n_times = len(key_times) + n_points = lon.size + data_array = np.empty((n_times, n_points), dtype=np.float32) + + for i, (key, _) in enumerate(key_times): + data_array[i] = mat[key].ravel() + + time_index = pd.DatetimeIndex([t for _, t in key_times]) + + unique_times, time_indices = np.unique(time_index, return_index=True) + data_array = data_array[time_indices] + time_index = pd.DatetimeIndex(unique_times) + + ds_vars[varname] = (("time", "points"), data_array) + + return xr.Dataset( + data_vars=ds_vars, + coords={ + "time": time_index, + "lon": ("points", lon), + "lat": ("points", lat), + }, + ) + + +def vortex2SWAN( + mesh: str, + ds_vortex: xr.Dataset, + output_path: str, + fill_value: float = 0, +) -> xr.Dataset: + """ + Convert the vortex dataset to a Delft3D FM compatible netCDF forcing file. + + Parameters + ---------- + mesh : str + The mesh path. + ds_vortex : xarray.Dataset + The vortex dataset containing wind speed and pressure data. + path_output : str + The output path where the netCDF file will be saved. + ds_name : str + The name of the output netCDF file, default is "forcing_Tonga_vortex.nc". 
+ forcing_ext : str + The extension for the forcing file, default is "GreenSurge_GFDcase_wind.ext". + fill_value : float + The fill value to use for missing data, default is 0. + + Returns + ------- + xarray.Dataset + A dataset containing the interpolated wind speed and pressure data, + ready for use in Delft3D FM. + """ + + Nodes_calc, _, _ = read_adcirc_grd(mesh) + + longitude = Nodes_calc[:, 1] + latitude = Nodes_calc[:, 2] + + dt = (ds_vortex.time[1] - ds_vortex.time[0]).values + time = np.arange(ds_vortex.time.size) * dt + n_time = len(time) + + lat_interp = xr.DataArray(latitude, dims="node") + lon_interp = xr.DataArray(longitude, dims="node") + angle = np.deg2rad((270 - ds_vortex.Dir.values) % 360) + W = ds_vortex.W.values + + windx_data = (W * np.cos(angle)).astype(np.float32) + windy_data = (W * np.sin(angle)).astype(np.float32) + pressure_data = ds_vortex.p.values.astype(np.float32) + + if windx_data.ndim == 3: + windx_data = np.transpose(windx_data, (2, 0, 1)) + windy_data = np.transpose(windy_data, (2, 0, 1)) + pressure_data = np.transpose(pressure_data, (2, 0, 1)) + + ds_vortex_interp = xr.Dataset( + { + "windx": ( + ("time", "latitude", "longitude"), + np.nan_to_num(windx_data, nan=fill_value), + ), + "windy": ( + ("time", "latitude", "longitude"), + np.nan_to_num(windy_data, nan=fill_value), + ), + "airpressure": ( + ("time", "latitude", "longitude"), + np.nan_to_num(pressure_data, nan=fill_value), + ), + }, + coords={ + "time": time, + "latitude": ds_vortex.lat.values, + "longitude": ds_vortex.lon.values, + }, + ) + forcing_dataset = ds_vortex_interp.interp(latitude=lat_interp, longitude=lon_interp) + + windx = forcing_dataset.windx.values + windy = forcing_dataset.windy.values + + with open(output_path, "w") as f: + for t in range(n_time): + time_str = np.datetime_as_string(time[t], unit="s") + time_str = time_str.replace("T", ".").replace("-", "").replace(":", "") + f.write(f"{time_str}\n") + f.write("wind x component\n") + np.savetxt(f, windx[t, 
:].reshape(1, -1), fmt="%.3f") + f.write("wind y component\n") + np.savetxt(f, windy[t, :].reshape(1, -1), fmt="%.3f") \ No newline at end of file diff --git a/bluemath_tk/wrappers/swan/swan_wrapper.py b/bluemath_tk/wrappers/swan/swan_wrapper.py index 29ef549..0c9653d 100644 --- a/bluemath_tk/wrappers/swan/swan_wrapper.py +++ b/bluemath_tk/wrappers/swan/swan_wrapper.py @@ -11,6 +11,7 @@ from .._base_wrappers import BaseModelWrapper from .._utils_wrappers import write_array_in_file +from .swan_utils import generate_forcing_file_GreenWaves, sbatch_file_greenwaves class SwanModelWrapper(BaseModelWrapper): @@ -447,3 +448,31 @@ def build_case(self, case_dir: str, case_context: dict) -> None: wavespectra.SpecDataset(mono_input_spectrum).to_swan( os.path.join(case_dir, f"input_spectra_{side}.bnd") ) + +class GreenWavesWrapper(SwanModelWrapper): + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.sbatch_file_example = sbatch_file_greenwaves + + def build_case( + self, + case_context: dict, + case_dir: str, + ) -> None: + """ + Build the input files for a case. + + Parameters + ---------- + case_context : dict + The case context. + case_dir : str + The case directory. 
+ """ + + generate_forcing_file_GreenWaves( + case_context=case_context, + case_dir=case_dir, + ds_GFD_info=case_context.get("ds_GFD_info"), + ) \ No newline at end of file From 5c707a23d77e538b867ed98646718de5c598185b Mon Sep 17 00:00:00 2001 From: FaugereE Date: Mon, 9 Mar 2026 09:59:15 +0100 Subject: [PATCH 3/3] [EJP] additive greensurge and greenvawes division for the creation of GreenWave's wrapper --- bluemath_tk/additive/additive.py | 1662 ++++++++++++++++++++++ bluemath_tk/additive/greensurge.py | 2075 ++-------------------------- bluemath_tk/additive/greenwaves.py | 186 +++ 3 files changed, 1982 insertions(+), 1941 deletions(-) create mode 100644 bluemath_tk/additive/additive.py create mode 100644 bluemath_tk/additive/greenwaves.py diff --git a/bluemath_tk/additive/additive.py b/bluemath_tk/additive/additive.py new file mode 100644 index 0000000..d04a69c --- /dev/null +++ b/bluemath_tk/additive/additive.py @@ -0,0 +1,1662 @@ +import warnings +from datetime import datetime +from typing import List, Tuple + +import cartopy.crs as ccrs +import matplotlib.gridspec as gridspec +import matplotlib.pyplot as plt +import numpy as np +import xarray as xr +from matplotlib.axes import Axes +from matplotlib.figure import Figure +from matplotlib.path import Path + +from ..core.operations import get_degrees_from_uv +from ..core.plotting.colors import hex_colors_land, hex_colors_water +from ..core.plotting.utils import join_colormaps + + +def add_forcing_edges_to_computational_domain(poly_clip, vert, tria): + """ + Add edges to the forcing mesh that align with the computational domain boundary. + - Edges with one endpoint inside: clip at the boundary. + - Edges with both inside: split at hole boundaries if they cross a hole. + - Edges with both outside: if the segment crosses the polygon, clip to the + interior portion(s) and add those segments as forcing edges. 
+ """ + from shapely.geometry import Point, LineString + + edges_new_full = np.vstack([ + tria[:, [0, 1]], + tria[:, [1, 2]], + tria[:, [2, 0]] + ]) + edges_new = np.unique(np.sort(edges_new_full, axis=1), axis=0) + + mask_node_in = np.array([poly_clip.contains(Point(x, y)) for x, y in vert]) + + count_inside = mask_node_in[edges_new].sum(axis=1) + edges_both_inside = edges_new[count_inside == 2] + edges_one_inside = edges_new[count_inside == 1] + edges_both_outside = edges_new[count_inside == 0] + + new_points = [] + tol = 1e-10 + + def point_to_index(pt, n1, n2, vert, new_points, tol): + """Retourne l'index (dans vert_update) du point pt, ou ajoute un nouveau point.""" + p1, p2 = vert[n1], vert[n2] + if np.linalg.norm(np.asarray(pt) - p1) <= tol: + return n1 + if np.linalg.norm(np.asarray(pt) - p2) <= tol: + return n2 + pt = np.asarray(pt, dtype=float) + for k, q in enumerate(new_points): + if np.linalg.norm(pt - np.asarray(q)) <= tol: + return len(vert) + k + new_points.append(pt) + return len(vert) + len(new_points) - 1 + + # ---- Arêtes avec un seul sommet inside : clip à la frontière + edges_one_inside_new = edges_one_inside.copy() + for i, (n1, n2) in enumerate(edges_one_inside): + p1, p2 = vert[n1], vert[n2] + inside1 = mask_node_in[n1] + line = LineString([p1, p2]) + inter = line.intersection(poly_clip.boundary) + if inter.is_empty: + continue + if inter.geom_type == "MultiPoint": + points = list(inter.geoms) + ref = p1 if inside1 else p2 + dists = [Point(ref).distance(pt) for pt in points] + inter_pt = np.array(points[np.argmin(dists)].coords[0]) + else: + inter_pt = np.array(inter.coords[0]) + new_index = len(vert) + len(new_points) + new_points.append(inter_pt) + if inside1: + edges_one_inside_new[i, 1] = new_index + else: + edges_one_inside_new[i, 0] = new_index + + # ---- Arêtes avec les deux sommets inside : découper aux trous si le segment traverse un trou + edges_both_inside_result = [] + for (n1, n2) in edges_both_inside: + p1, p2 = vert[n1], 
vert[n2] + line = LineString([p1, p2]) + geom = line.intersection(poly_clip) + + if geom.is_empty or geom.geom_type == "Point": + continue + if geom.geom_type == "LineString": + coords = np.array(geom.coords) + if len(coords) >= 2 and np.linalg.norm(coords[0] - coords[-1]) > tol: + edges_both_inside_result.append((n1, n2)) + continue + if geom.geom_type == "MultiLineString": + for seg in geom.geoms: + coords = np.array(seg.coords) + if len(coords) < 2: + continue + start_pt = coords[0] + end_pt = coords[-1] + idx_start = point_to_index(start_pt, n1, n2, vert, new_points, tol) + idx_end = point_to_index(end_pt, n1, n2, vert, new_points, tol) + if idx_start != idx_end: + edges_both_inside_result.append((idx_start, idx_end)) + continue + edges_both_inside_result.append((n1, n2)) + + # ---- Arêtes avec les deux sommets outside : inclure si le segment traverse le polygone + edges_both_outside_result = [] + for (n1, n2) in edges_both_outside: + p1, p2 = vert[n1], vert[n2] + line = LineString([p1, p2]) + geom = line.intersection(poly_clip) + + if geom.is_empty or geom.geom_type == "Point": + continue + if geom.geom_type == "LineString": + coords = np.array(geom.coords) + if len(coords) >= 2 and np.linalg.norm(coords[0] - coords[-1]) > tol: + idx_start = point_to_index(coords[0], n1, n2, vert, new_points, tol) + idx_end = point_to_index(coords[-1], n1, n2, vert, new_points, tol) + if idx_start != idx_end: + edges_both_outside_result.append((idx_start, idx_end)) + continue + if geom.geom_type == "MultiLineString": + for seg in geom.geoms: + coords = np.array(seg.coords) + if len(coords) < 2: + continue + idx_start = point_to_index(coords[0], n1, n2, vert, new_points, tol) + idx_end = point_to_index(coords[-1], n1, n2, vert, new_points, tol) + if idx_start != idx_end: + edges_both_outside_result.append((idx_start, idx_end)) + continue + + # Sommets mis à jour et arêtes finales + if len(new_points) > 0: + vert_update = np.vstack([vert, np.array(new_points)]) + else: + 
vert_update = vert + + parts = [] + if len(edges_both_inside_result) > 0: + parts.append(np.array(edges_both_inside_result)) + parts.append(edges_one_inside_new) + if len(edges_both_outside_result) > 0: + parts.append(np.array(edges_both_outside_result)) + + edges_used = np.vstack(parts) if parts else np.empty((0, 2), dtype=int) + + if edges_used.shape[0] == 0: + return vert_update, np.empty((0, 2), dtype=int) + + used_nodes = np.unique(edges_used.flatten()) + vert_clean = vert_update[used_nodes] + mapping = {old_idx: new_idx for new_idx, old_idx in enumerate(used_nodes)} + edges_clean = np.vectorize(mapping.get)(edges_used) + + return vert_clean, edges_clean + +def read_adcirc_grd(grd_file: str) -> Tuple[np.ndarray, np.ndarray, List[str]]: + """ + Reads the ADCIRC grid file and returns the node and element data. + + Parameters + ---------- + grd_file : str + Path to the ADCIRC grid file. + + Returns + ------- + Tuple[np.ndarray, np.ndarray, List[str]] + A tuple containing: + - Nodes (np.ndarray): An array of shape (nnodes, 3) containing the coordinates of each node. + - Elmts (np.ndarray): An array of shape (nelmts, 3) containing the element connectivity, + with node indices adjusted (decremented by 1). + - lines (List[str]): The remaining lines in the file after reading the nodes and elements. 
+ + Examples + -------- + >>> nodes, elmts, lines = read_adcirc_grd("path/to/grid.grd") + >>> print(nodes.shape, elmts.shape, len(lines)) + (1000, 3) (500, 3) 10 + """ + + with open(grd_file, "r") as f: + f.readline() # Skip header line + header1 = f.readline() + header_nums = list(map(float, header1.split())) + nelmts = int(header_nums[0]) + nnodes = int(header_nums[1]) + + Nodes = np.loadtxt(f, max_rows=nnodes) + Elmts = np.loadtxt(f, max_rows=nelmts) - 1 + lines = f.readlines() + + return Nodes, Elmts, lines + + +def get_regular_grid( + node_computation_longitude: np.ndarray, + node_computation_latitude: np.ndarray, + node_computation_elements: np.ndarray, + factor: float = 10.0, + margin_deg: float = 0, +) -> Tuple[np.ndarray, np.ndarray]: + """ + Generate a regular lon/lat grid slightly larger than the bounds of the node coordinates. + Grid resolution is derived from the smallest element size scaled by a factor. + + Parameters + ---------- + node_computation_longitude : np.ndarray + 1D array of longitudes for the nodes. + node_computation_latitude : np.ndarray + 1D array of latitudes for the nodes. + node_computation_elements : np.ndarray + 2D array of indices defining the triangular elements. + factor : float, optional + Resolution scaling factor: higher means coarser grid. + margin_deg : float, optional + Margin to add (in degrees) to each side of the bounding box. + + Returns + ------- + lon_grid : np.ndarray + 1D array of longitudes defining the grid. + lat_grid : np.ndarray + 1D array of latitudes defining the grid. 
+ """ + + # Bounding box with margin + lon_min = node_computation_longitude.min() - margin_deg + lon_max = node_computation_longitude.max() + margin_deg + lat_min = node_computation_latitude.min() - margin_deg + lat_max = node_computation_latitude.max() + margin_deg + + # Get triangle node coordinates + lon_tri = node_computation_longitude[node_computation_elements] + lat_tri = node_computation_latitude[node_computation_elements] + + # Estimate resolution from max side of each triangle + dlon01 = np.abs(lon_tri[:, 0] - lon_tri[:, 1]) + dlon12 = np.abs(lon_tri[:, 1] - lon_tri[:, 2]) + dlon20 = np.abs(lon_tri[:, 2] - lon_tri[:, 0]) + max_dlon = np.stack([dlon01, dlon12, dlon20], axis=1).max(axis=1) + min_dx = np.min(max_dlon) * factor + + dlat01 = np.abs(lat_tri[:, 0] - lat_tri[:, 1]) + dlat12 = np.abs(lat_tri[:, 1] - lat_tri[:, 2]) + dlat20 = np.abs(lat_tri[:, 2] - lat_tri[:, 0]) + max_dlat = np.stack([dlat01, dlat12, dlat20], axis=1).max(axis=1) + min_dy = np.min(max_dlat) * factor + + # Create regular grid + lon_grid = np.arange(lon_min, lon_max + min_dx, min_dx) + lat_grid = np.arange(lat_min, lat_max + min_dy, min_dy) + + return lon_grid, lat_grid + + +def generate_structured_points( + triangle_connectivity: np.ndarray, + node_lon: np.ndarray, + node_lat: np.ndarray, +) -> Tuple[np.ndarray, np.ndarray]: + """ + Generate structured points for each triangle in the mesh. + Each triangle will have 10 points: vertices, centroid, midpoints of edges, + and midpoints of vertex-centroid segments. + + Parameters + ---------- + triangle_connectivity : np.ndarray + Array of shape (n_triangles, 3) containing indices of the vertices for each triangle. + node_lon : np.ndarray + Array of shape (n_nodes,) containing the longitudes of the nodes. + node_lat : np.ndarray + Array of shape (n_nodes,) containing the latitudes of the nodes. 
+ + Returns + ------- + lon_all : np.ndarray + Array of shape (n_triangles, 10) containing the longitudes of the structured points for each triangle. + lat_all : np.ndarray + Array of shape (n_triangles, 10) containing the latitudes of the structured points for each triangle. + """ + + n_tri = triangle_connectivity.shape[0] + lon_all = np.empty((n_tri, 10)) + lat_all = np.empty((n_tri, 10)) + + for i, tri in enumerate(triangle_connectivity): + A = np.array([node_lon[tri[0]], node_lat[tri[0]]]) + B = np.array([node_lon[tri[1]], node_lat[tri[1]]]) + C = np.array([node_lon[tri[2]], node_lat[tri[2]]]) + + G = (A + B + C) / 3 + M_AB = (A + B) / 2 + M_BC = (B + C) / 2 + M_CA = (C + A) / 2 + M_AG = (A + G) / 2 + M_BG = (B + G) / 2 + M_CG = (C + G) / 2 + + points = [A, B, C, G, M_AB, M_BC, M_CA, M_AG, M_BG, M_CG] + lon_all[i, :] = [pt[0] for pt in points] + lat_all[i, :] = [pt[1] for pt in points] + + return lon_all, lat_all + + +def plot_additive_input_wind_partition( + xds_vortex_GS: xr.Dataset, + xds_vortex_interp: xr.Dataset, + ds_GFD_info: xr.Dataset, + i_time: int = 0, + SWATH: bool = False, + figsize=(10, 8), +) -> None: + """ + Plot the wind partition for GreenSurge input data. + + Parameters + ---------- + xds_vortex_GS : xr.Dataset + Dataset containing the vortex model data for GreenSurge. + xds_vortex_interp : xr.Dataset + Dataset containing the interpolated vortex model data. + ds_GFD_info : xr.Dataset + Dataset containing the GreenSurge forcing information. + i_time : int, optional + Index of the time step to plot. Default is 0. + figsize : tuple, optional + Figure size. Default is (10, 8). 
+ """ + + simple_quiver = 20 + scale = 30 + width = 0.003 + + fig, (ax1, ax2) = plt.subplots( + 1, + 2, + figsize=figsize, + subplot_kw={"projection": ccrs.PlateCarree()}, + constrained_layout=True, + ) + time = xds_vortex_GS.time.isel(time=i_time) + + ax1.set_title("Vortex wind") + ax2.set_title("Wind partition (GreenSurge)") + + triangle_forcing_connectivity = ds_GFD_info.triangle_forcing_connectivity.values + node_forcing_longitude = ds_GFD_info.node_forcing_longitude.values + node_forcing_latitude = ds_GFD_info.node_forcing_latitude.values + + Lon = xds_vortex_GS.lon + Lat = xds_vortex_GS.lat + if not SWATH: + W = xds_vortex_interp.W.isel(time=i_time) + Dir = (270 - xds_vortex_interp.Dir.isel(time=i_time)) % 360 + W_reg = xds_vortex_GS.W.isel(time=i_time) + Dir_reg = (270 - xds_vortex_GS.Dir.isel(time=i_time)) % 360 + fig.suptitle( + f"Wind partition for {time.values.astype('datetime64[s]').astype(str)}", + ) + else: + W = xds_vortex_interp.W.max(dim="time") + W_reg = xds_vortex_GS.W.max(dim="time") + fig.suptitle( + "Wind partition SWATH", + ) + vmin = np.min((W.min(), W_reg.min())) + vmax = np.max((W.max(), W_reg.max())) + + cmap = join_colormaps( + cmap1="viridis", + cmap2="plasma_r", + name="wind_partition_cmap", + range1=(0.2, 1.0), + range2=(0.05, 0.8), + ) + + ax2.tripcolor( + node_forcing_longitude, + node_forcing_latitude, + triangle_forcing_connectivity, + facecolors=W, + cmap=cmap, + vmin=vmin, + vmax=vmax, + edgecolor="white", + shading="flat", + transform=ccrs.PlateCarree(), + ) + + pm1 = ax1.pcolormesh( + Lon, + Lat, + W_reg, + cmap=cmap, + vmin=vmin, + vmax=vmax, + shading="auto", + transform=ccrs.PlateCarree(), + ) + cbar = fig.colorbar( + pm1, ax=(ax1, ax2), orientation="horizontal", pad=0.03, aspect=50 + ) + cbar.set_label( + "{0} ({1})".format("Wind", "m.s⁻¹"), + rotation=0, + va="bottom", + fontweight="bold", + labelpad=15, + ) + if not SWATH: + ax2.quiver( + np.mean(node_forcing_longitude[triangle_forcing_connectivity], axis=1), + 
np.mean(node_forcing_latitude[triangle_forcing_connectivity], axis=1), + np.cos(np.deg2rad(Dir)), + np.sin(np.deg2rad(Dir)), + color="black", + scale=scale, + width=width, + transform=ccrs.PlateCarree(), + ) + + ax1.quiver( + Lon[::simple_quiver], + Lat[::simple_quiver], + (np.cos(np.deg2rad(Dir_reg)))[::simple_quiver, ::simple_quiver], + (np.sin(np.deg2rad(Dir_reg)))[::simple_quiver, ::simple_quiver], + color="black", + scale=scale, + width=width, + transform=ccrs.PlateCarree(), + ) + + ax1.set_extent([Lon.min(), Lon.max(), Lat.min(), Lat.max()], crs=ccrs.PlateCarree()) + ax2.set_extent([Lon.min(), Lon.max(), Lat.min(), Lat.max()], crs=ccrs.PlateCarree()) + + ax1.coastlines() + ax2.coastlines() + + +def plot_additive_setup( + info_ds: xr.Dataset, figsize: tuple = (10, 10), fig: Figure = None, ax: Axes = None +) -> Tuple[Figure, Axes]: + """ + Plot the GreenSurge mesh setup from the provided dataset. + + Parameters + ---------- + info_ds : xr.Dataset + Dataset containing the mesh information. + figsize : tuple, optional + Figure size. Default is (10, 10). + fig : Figure, optional + Figure object to plot on. If None, a new figure will be created. + ax : Axes, optional + Axes object to plot on. If None, a new axes will be created. + + Returns + ------- + fig : Figure + Figure object. + ax : Axes + Axes object. 
+ """ + + # Extract data from the dataset + connectivity = info_ds.triangle_forcing_connectivity.values + node_forcing_longitude = info_ds.node_forcing_longitude.values + node_forcing_latitude = info_ds.node_forcing_latitude.values + node_computation_longitude = info_ds.node_computation_longitude.values + node_computation_latitude = info_ds.node_computation_latitude.values + + num_elements = len(connectivity) + if fig is None or ax is None: + fig, ax = plt.subplots( + subplot_kw={"projection": ccrs.PlateCarree()}, + figsize=figsize, + constrained_layout=True, + ) + + ax.triplot( + node_computation_longitude, + node_computation_latitude, + info_ds.triangle_computation_connectivity.values, + color="grey", + linestyle="-", + marker="", + linewidth=0.5, + label="Computational mesh", + ) + ax.triplot( + node_forcing_longitude, + node_forcing_latitude, + connectivity, + color="green", + linestyle="-", + marker="", + linewidth=1, + label=f"Forcing mesh ({num_elements} elements)", + ) + + bnd = [ + min(node_computation_longitude.min(), node_forcing_longitude.min()), + max(node_computation_longitude.max(), node_forcing_longitude.max()), + min(node_computation_latitude.min(), node_forcing_latitude.min()), + max(node_computation_latitude.max(), node_forcing_latitude.max()), + ] + ax.set_extent([*bnd], crs=ccrs.PlateCarree()) + plt.legend(loc="lower left", fontsize=10, markerscale=2.0) + ax.set_title("GreenSurge Mesh Setup") + + return fig, ax + + +def create_triangle_mask_from_points( + lon: np.ndarray, lat: np.ndarray, triangle: np.ndarray +) -> np.ndarray: + """ + Create a mask indicating which scattered points are inside a triangle. + + Parameters + ---------- + lon : np.ndarray + 1D array of longitudes of the points. + lat : np.ndarray + 1D array of latitudes of the points. + triangle : np.ndarray + (3, 2) array containing the triangle vertices as (lon, lat) pairs. 
+ + Returns + ------- + mask : np.ndarray + 1D boolean array of same length as lon/lat indicating points inside the triangle. + """ + + points = np.column_stack((lon, lat)) # Shape (N, 2) + triangle_path = Path(triangle) + mask = triangle_path.contains_points(points) + + return mask + + +def plot_additive_vs_dynamic_windsetup_swath( + ds_WL_GS_WindSetUp: xr.Dataset, + ds_WL_dynamic_WindSetUp: xr.Dataset, + ds_gfd_metadata: xr.Dataset, + vmin: float = None, + vmax: float = None, + figsize: tuple = (10, 8), +) -> None: + """ + Plot the GreenSurge and dynamic wind setup from the provided datasets. + + Parameters + ---------- + ds_WL_GS_WindSetUp : xr.Dataset + Dataset containing the GreenSurge wind setup data. + ds_WL_dynamic_WindSetUp : xr.Dataset + Dataset containing the dynamic wind setup data. + ds_gfd_metadata : xr.Dataset + Dataset containing the metadata for the GFD mesh. + vmin : float, optional + Minimum value for the color scale. Default is None. + vmax : float, optional + Maximum value for the color scale. Default is None. + figsize : tuple, optional + Figure size. Default is (10, 8). + Returns + ------- + fig : Figure + The figure object containing the plots. + axs : list of Axes + List of Axes objects for the two subplots. 
+ """ + + warnings.filterwarnings("ignore", message="All-NaN slice encountered") + + X = ds_gfd_metadata.node_computation_longitude.values + Y = ds_gfd_metadata.node_computation_latitude.values + triangles = ds_gfd_metadata.triangle_computation_connectivity.values + + Longitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values + Latitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values + + xds_GS = np.nanmax(ds_WL_GS_WindSetUp["WL"].values, axis=0) + xds_DY = np.nanmax(ds_WL_dynamic_WindSetUp["mesh2d_s1"].values, axis=0) + + if vmin is None: + vmin = 0 + if vmax is None: + vmax = float(np.nanmax(xds_GS)) + + fig, axs = plt.subplots( + nrows=1, + ncols=2, + figsize=figsize, + subplot_kw={"projection": ccrs.PlateCarree()}, + constrained_layout=True, + ) + + axs[0].tripcolor( + Longitude_dynamic, + Latitude_dynamic, + ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values - 1, + facecolors=xds_DY, + cmap="CMRmap_r", + vmin=vmin, + vmax=vmax, + transform=ccrs.PlateCarree(), + ) + + pm = axs[1].tripcolor( + X, + Y, + triangles, + facecolors=xds_GS, + cmap="CMRmap_r", + vmin=vmin, + vmax=vmax, + transform=ccrs.PlateCarree(), + ) + cbar = fig.colorbar(pm, ax=axs, orientation="horizontal", pad=0.03, aspect=50) + cbar.set_label( + "WL ({})".format("m"), rotation=0, va="bottom", fontweight="bold", labelpad=15 + ) + fig.suptitle("SWATH", fontsize=18, fontweight="bold") + + axs[0].set_title("Dynamic Wind SetUp", fontsize=14) + axs[1].set_title("GreenSurge Wind SetUp", fontsize=14) + + lon_min = np.nanmin(Longitude_dynamic) + lon_max = np.nanmax(Longitude_dynamic) + lat_min = np.nanmin(Latitude_dynamic) + lat_max = np.nanmax(Latitude_dynamic) + + for ax in axs: + ax.set_extent([lon_min, lon_max, lat_min, lat_max]) + return fig, axs + + +def plot_additive_vs_dynamic_windsetup( + ds_WL_GS_WindSetUp: xr.Dataset, + ds_WL_dynamic_WindSetUp: xr.Dataset, + ds_gfd_metadata: xr.Dataset, + time: datetime, + vmin: float = None, + vmax: float = None, + figsize: tuple = (10, 8), +) -> 
None: + """ + Plot the GreenSurge and dynamic wind setup from the provided datasets. + + Parameters + ---------- + ds_WL_GS_WindSetUp: xr.Dataset + xarray Dataset containing the GreenSurge wind setup data. + ds_WL_dynamic_WindSetUp: xr.Dataset + xarray Dataset containing the dynamic wind setup data. + ds_gfd_metadata: xr.Dataset + xarray Dataset containing the metadata for the GFD mesh. + time: datetime.datetime + The time point at which to plot the data. + vmin: float, optional + Minimum value for the color scale. Default is None. + vmax: float, optional + Maximum value for the color scale. Default is None. + figsize: tuple, optional + Tuple specifying the figure size. Default is (10, 8). + """ + + warnings.filterwarnings("ignore", message="All-NaN slice encountered") + + X = ds_gfd_metadata.node_computation_longitude.values + Y = ds_gfd_metadata.node_computation_latitude.values + triangles = ds_gfd_metadata.triangle_computation_connectivity.values + + Longitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values + Latitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values + + xds_GS = ds_WL_GS_WindSetUp["WL"].sel(time=time).values + xds_DY = ds_WL_dynamic_WindSetUp["mesh2d_s1"].sel(time=time).values + if vmin is None or vmax is None: + vmax = float(np.nanmax(xds_GS)) * 0.5 + vmin = -vmax + + fig, axs = plt.subplots( + nrows=1, + ncols=2, + figsize=figsize, + subplot_kw={"projection": ccrs.PlateCarree()}, + constrained_layout=True, + ) + + axs[0].tripcolor( + Longitude_dynamic, + Latitude_dynamic, + ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values - 1, + facecolors=xds_DY, + cmap="bwr", + vmin=vmin, + vmax=vmax, + transform=ccrs.PlateCarree(), + ) + + pm = axs[1].tripcolor( + X, + Y, + triangles, + facecolors=xds_GS, + cmap="bwr", + vmin=vmin, + vmax=vmax, + transform=ccrs.PlateCarree(), + ) + cbar = fig.colorbar(pm, ax=axs, orientation="horizontal", pad=0.03, aspect=50) + cbar.set_label( + "WL ({})".format("m"), rotation=0, va="bottom", fontweight="bold", 
labelpad=15 + ) + fig.suptitle( + f"Wind SetUp for {time.astype('datetime64[s]').astype(str)}", + fontsize=16, + fontweight="bold", + ) + + axs[0].set_title("Dynamic Wind SetUp") + axs[1].set_title("GreenSurge Wind SetUp") + + lon_min = np.nanmin(Longitude_dynamic) + lon_max = np.nanmax(Longitude_dynamic) + lat_min = np.nanmin(Latitude_dynamic) + lat_max = np.nanmax(Latitude_dynamic) + for ax in axs: + ax.set_extent([lon_min, lon_max, lat_min, lat_max]) + + +def plot_additive_TG_validation_timeseries( + ds_WL_GS_WindSetUp: xr.Dataset, + ds_WL_GS_IB: xr.Dataset, + ds_WL_dynamic_WindSetUp: xr.Dataset, + tide_gauge: xr.Dataset, + ds_GFD_info: xr.Dataset, + figsize: tuple = (20, 7), + WLmin: float = None, + WLmax: float = None, +) -> None: + """ + Plot a time series comparison of GreenSurge, dynamic wind setup, and tide gauge data with a bathymetry map. + + Parameters + ---------- + ds_WL_GS_WindSetUp : xr.Dataset + Dataset containing GreenSurge wind setup data with dimensions (nface, time). + ds_WL_GS_IB : xr.Dataset + Dataset containing inverse barometer data with dimensions (lat, lon, time). + ds_WL_dynamic_WindSetUp : xr.Dataset + Dataset containing dynamic wind setup data with dimensions (mesh2d_nFaces, time). + tide_gauge : xr.Dataset + Dataset containing tide gauge data with dimensions (time). + ds_GFD_info : xr.Dataset + Dataset containing grid information with longitude and latitude coordinates. + figsize : tuple, optional + Size of the figure for the plot. Default is (15, 7). + WLmin : float, optional + Minimum water level for the plot. Default is None. + WLmax : float, optional + Maximum water level for the plot. Default is None. 
+ """ + + lon_obs = tide_gauge.lon.values + lat_obs = tide_gauge.lat.values + lon_obs = np.where(lon_obs > 180, lon_obs - 360, lon_obs) + + nface_index = extract_pos_nearest_points_tri(ds_GFD_info, lon_obs, lat_obs) + mesh2d_nFaces = extract_pos_nearest_points_tri( + ds_WL_dynamic_WindSetUp, lon_obs, lat_obs + ) + pos_lon_IB, pos_lat_IB = extract_pos_nearest_points(ds_WL_GS_IB, lon_obs, lat_obs) + + time = ds_WL_GS_WindSetUp.WL.time + ds_WL_dynamic_WindSetUp = ds_WL_dynamic_WindSetUp.sel(time=time) + ds_WL_GS_IB = ds_WL_GS_IB.interp(time=time) + + WL_GS = ds_WL_GS_WindSetUp.WL.sel(nface=nface_index).values.squeeze() + WL_dyn = ds_WL_dynamic_WindSetUp.mesh2d_s1.sel( + mesh2d_nFaces=mesh2d_nFaces + ).values.squeeze() + WL_IB = ds_WL_GS_IB.IB.values[pos_lat_IB, pos_lon_IB, :].squeeze() + WL_TG = tide_gauge.SS.values + + WL_SS_dyn = WL_dyn + WL_IB + WL_SS_GS = WL_GS + WL_IB + + fig = plt.figure(figsize=figsize) + gs = gridspec.GridSpec(1, 2, width_ratios=[1, 3], figure=fig) + + ax_map = fig.add_subplot(gs[0], projection=ccrs.PlateCarree()) + X = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values + Y = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values + triangles = ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values.astype(int) - 1 + Z = np.mean(ds_WL_dynamic_WindSetUp.mesh2d_node_z.values[triangles], axis=1) + vmin = np.nanmin(Z) + vmax = np.nanmax(Z) + + cmap, norm = join_colormaps( + cmap1=hex_colors_water, + cmap2=hex_colors_land, + value_range1=(vmin, 0.0), + value_range2=(0.0, vmax), + name="raster_cmap", + ) + ax_map.set_facecolor("#518134") + + ax_map.tripcolor( + X, + Y, + triangles, + facecolors=Z, + cmap=cmap, + norm=norm, + shading="flat", + transform=ccrs.PlateCarree(), + ) + + ax_map.scatter( + lon_obs, + lat_obs, + color="red", + marker="x", + transform=ccrs.PlateCarree(), + label="Tide Gauge", + ) + ax_map.set_extent([X.min(), X.max(), Y.min(), Y.max()], crs=ccrs.PlateCarree()) + ax_map.set_title("Bathymetry Map") + ax_map.legend(loc="upper right", fontsize="small") + 
def extract_pos_nearest_points_tri(
    ds_mesh_info: xr.Dataset, lon_points: np.ndarray, lat_points: np.ndarray
) -> np.ndarray:
    """
    Find the nearest mesh face/node index for each (lon, lat) query point.

    Two dataset layouts are supported: a GreenSurge info dataset (detected by
    the presence of ``node_forcing_latitude``), whose computation-node
    coordinates and indices are used, or a Delft3D-FM map dataset, whose face
    coordinates (``mesh2d_face_x/y``) and ``mesh2d_nFaces`` indices are used.

    Parameters
    ----------
    ds_mesh_info : xr.Dataset
        Dataset containing mesh information with longitude and latitude coordinates.
    lon_points : np.ndarray
        Longitudes of the query points.
    lat_points : np.ndarray
        Latitudes of the query points.

    Returns
    -------
    list of int
        Nearest index for each query point, in input order.
    """

    is_greensurge_info = "node_forcing_latitude" in ds_mesh_info.variables
    if is_greensurge_info:
        mesh_lon = ds_mesh_info.node_computation_longitude.values
        mesh_lat = ds_mesh_info.node_computation_latitude.values
        # NOTE: "cumputation" is the (misspelled) index name written by the
        # GreenSurge info-dataset builder; it must match that dataset.
        index_values = ds_mesh_info.node_cumputation_index.values
    else:
        mesh_lon = ds_mesh_info.mesh2d_face_x.values
        mesh_lat = ds_mesh_info.mesh2d_face_y.values
        index_values = ds_mesh_info.mesh2d_nFaces.values

    nearest_index = []
    for lon, lat in zip(lon_points, lat_points):
        # argmin of squared distance == argmin of distance (sqrt is monotonic)
        sq_dist = (mesh_lon - lon) ** 2 + (mesh_lat - lat) ** 2
        nearest_index.append(index_values[np.argmin(sq_dist)].astype(int))

    return nearest_index


def extract_pos_nearest_points(
    ds_mesh_info: xr.Dataset, lon_points: np.ndarray, lat_points: np.ndarray
) -> Tuple[np.ndarray, np.ndarray]:
    """
    Find nearest regular-grid indices for each (lon, lat) query point.

    Longitude and latitude are matched independently against the dataset's 1D
    ``lon`` / ``lat`` coordinate arrays.

    Parameters
    ----------
    ds_mesh_info : xr.Dataset
        Dataset with 1D ``lon`` and ``lat`` coordinate arrays.
    lon_points : np.ndarray
        Longitudes of the query points.
    lat_points : np.ndarray
        Latitudes of the query points.

    Returns
    -------
    pos_lon_points_mesh : list of int
        Longitude indices into the grid, one per query point.
    pos_lat_points_mesh : list of int
        Latitude indices into the grid, one per query point.
    """

    grid_lon = ds_mesh_info.lon.values
    grid_lat = ds_mesh_info.lat.values

    pos_lon_points_mesh = [
        np.nanargmin((lon - grid_lon) ** 2).astype(int) for lon in lon_points
    ]
    pos_lat_points_mesh = [
        np.nanargmin((lat - grid_lat) ** 2).astype(int) for lat in lat_points
    ]

    return pos_lon_points_mesh, pos_lat_points_mesh


def pressure_to_IB(xds_presure: xr.Dataset) -> xr.Dataset:
    """
    Add an inverse-barometer (IB) water-level variable derived from pressure.

    IB is computed as (101325 - p) / 10000: the deviation from standard
    atmospheric pressure (Pa) divided by an approximate rho*g, giving metres.

    Parameters
    ----------
    xds_presure : xr.Dataset
        Dataset containing pressure variable ``p`` with dimensions (lat, lon, time).

    Returns
    -------
    xr.Dataset
        Copy of the input with an additional ``IB`` variable (lat, lon, time).
    """

    # (standard pressure - p) in Pa, over approx rho*g = 10000 -> metres of water
    inverse_barometer = (101325 - xds_presure.p.values) / 10000

    xds_with_ib = xds_presure.copy()
    xds_with_ib["IB"] = (("lat", "lon", "time"), inverse_barometer)

    return xds_with_ib
+ """ + + Nodes_calc, Elmts_calc, _ = read_adcirc_grd(path_grd_calc) + + fig, ax = plt.subplots( + subplot_kw={"projection": ccrs.PlateCarree()}, + figsize=figsize, + constrained_layout=True, + ) + + Longitude_nodes_calc = Nodes_calc[:, 1] + Latitude_nodes_calc = Nodes_calc[:, 2] + Elements_calc = Elmts_calc[:, 2:5].astype(int) + depth = -np.mean(Nodes_calc[Elements_calc, 3], axis=1) + + vmin = np.nanmin(depth) + vmax = np.nanmax(depth) + + cmap, norm = join_colormaps( + cmap1=hex_colors_water, + cmap2=hex_colors_land, + value_range1=(vmin, 0.0), + value_range2=(0.0, vmax), + name="raster_cmap", + ) + + tpc = ax.tripcolor( + Longitude_nodes_calc, + Latitude_nodes_calc, + Elements_calc, + facecolors=depth, + cmap=cmap, + norm=norm, + shading="flat", + transform=ccrs.PlateCarree(), + ) + cbar = plt.colorbar(tpc, ax=ax) + cbar.set_label("Depth (m)") + + plot_additive_setup(simulation_dataset, figsize=(7, 7), ax=ax, fig=fig) + + +def interp_vortex_to_triangles( + xds_vortex_GS: xr.Dataset, + lon_all: np.ndarray, + lat_all: np.ndarray, + method: str = "tri_mean", +) -> xr.Dataset: + """ + Interpolate vortex model data to triangle points. + + Parameters + ---------- + xds_vortex_GS : xr.Dataset + Dataset containing the vortex model data. + lon_all : np.ndarray + Longitudes of the triangle points. + lat_all : np.ndarray + Latitudes of the triangle points. + method : str, optional + Interpolation method: "tri_mean" (default) or "tri_points". + + Returns + ------- + xr.Dataset + Dataset containing the interpolated vortex model data at the triangle points. + + Notes + ----- + This function interpolates vortex model data (wind speed, direction, and pressure) + to the triangle points defined by `lon_all` and `lat_all`. It reshapes the data + to match the number of triangles and points, and computes the mean values for each triangle. 
+ """ + if method == "tri_mean": + n_tri, n_pts = lat_all.shape + lat_interp = lat_all.reshape(-1) + lon_interp = lon_all.reshape(-1) + elif method == "tri_points": + n_tri = lat_all.shape + lat_interp = lat_all + lon_interp = lon_all + + lat_interp = xr.DataArray(lat_interp, dims="point") + lon_interp = xr.DataArray(lon_interp, dims="point") + + if method == "tri_mean": + W_interp = xds_vortex_GS.W.interp(lat=lat_interp, lon=lon_interp) + Dir_interp = xds_vortex_GS.Dir.interp(lat=lat_interp, lon=lon_interp) + p_interp = xds_vortex_GS.p.interp(lat=lat_interp, lon=lon_interp) + + W_interp = W_interp.values.reshape(n_tri, n_pts, -1) + Dir_interp = Dir_interp.values.reshape(n_tri, n_pts, -1) + p_interp = p_interp.values.reshape(n_tri, n_pts, -1) + + theta_rad = np.deg2rad(Dir_interp) + u = np.cos(theta_rad) + v = np.sin(theta_rad) + u_mean = u.mean(axis=1) + v_mean = v.mean(axis=1) + Dir_out = (np.rad2deg(np.arctan2(v_mean, u_mean))) % 360 + W_out = W_interp.mean(axis=1) + p_out = p_interp.mean(axis=1) + elif method == "tri_points": + return xds_vortex_GS.interp(lat=lat_interp, lon=lon_interp) + + xds_vortex_interp = xr.Dataset( + data_vars={ + "W": (("element", "time"), W_out), + "Dir": (("element", "time"), Dir_out), + "p": (("element", "time"), p_out), + }, + coords={"time": xds_vortex_GS.time.values, "element": np.arange(n_tri)}, + ) + + return xds_vortex_interp + + +def plot_additive_validation_timeseries( + ds_WL_GS_WindSetUp: xr.Dataset, + ds_WL_GS_IB: xr.Dataset, + ds_WL_dynamic_WindSetUp: xr.Dataset, + ds_GFD_info: xr.Dataset, + lon_obs: float = [184.8], + lat_obs: float = [-21.14], + figsize: tuple = (20, 7), + WLmin: float = None, + WLmax: float = None, +) -> None: + """ + Plot a time series comparison of GreenSurge, dynamic wind setup, and tide gauge data with a bathymetry map. + + Parameters + ---------- + ds_WL_GS_WindSetUp : xr.Dataset + Dataset containing GreenSurge wind setup data with dimensions (nface, time). 
+ ds_WL_GS_IB : xr.Dataset + Dataset containing inverse barometer data with dimensions (lat, lon, time). + ds_WL_dynamic_WindSetUp : xr.Dataset + Dataset containing dynamic wind setup data with dimensions (mesh2d_nFaces, time). + ds_GFD_info : xr.Dataset + Dataset containing grid information with longitude and latitude coordinates. + figsize : tuple, optional + Size of the figure for the plot. Default is (15, 7). + WLmin : float, optional + Minimum water level for the plot. Default is None. + WLmax : float, optional + Maximum water level for the plot. Default is None. + """ + + lon_obs = [lon - 360 if lon > 180 else lon for lon in lon_obs] + + nface_index = extract_pos_nearest_points_tri(ds_GFD_info, lon_obs, lat_obs) + mesh2d_nFaces = extract_pos_nearest_points_tri( + ds_WL_dynamic_WindSetUp, lon_obs, lat_obs + ) + pos_lon_IB, pos_lat_IB = extract_pos_nearest_points(ds_WL_GS_IB, lon_obs, lat_obs) + + time = ds_WL_GS_WindSetUp.WL.time + ds_WL_dynamic_WindSetUp = ds_WL_dynamic_WindSetUp.sel(time=time) + ds_WL_GS_IB = ds_WL_GS_IB.interp(time=time) + + WL_GS = ds_WL_GS_WindSetUp.WL.sel(nface=nface_index).values + WL_dyn = ds_WL_dynamic_WindSetUp.mesh2d_s1.sel(mesh2d_nFaces=mesh2d_nFaces).values + WL_IB = ds_WL_GS_IB.IB.values[pos_lat_IB, pos_lon_IB, :].T + + WL_SS_dyn = WL_dyn + WL_IB + WL_SS_GS = WL_GS + WL_IB + + fig = plt.figure(figsize=figsize) + gs = gridspec.GridSpec(1, 2, width_ratios=[1, 3], figure=fig) + + ax_map = fig.add_subplot(gs[0], projection=ccrs.PlateCarree()) + X = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values + Y = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values + triangles = ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values.astype(int) - 1 + Z = np.mean(ds_WL_dynamic_WindSetUp.mesh2d_node_z.values[triangles], axis=1) + vmin = np.nanmin(Z) + vmax = np.nanmax(Z) + + cmap, norm = join_colormaps( + cmap1=hex_colors_water, + cmap2=hex_colors_land, + value_range1=(vmin, 0.0), + value_range2=(0.0, vmax), + name="raster_cmap", + ) + 
ax_map.set_facecolor("#518134") + + ax_map.tripcolor( + X, + Y, + triangles, + facecolors=Z, + cmap=cmap, + norm=norm, + shading="flat", + transform=ccrs.PlateCarree(), + ) + + ax_map.scatter( + lon_obs, + lat_obs, + color="red", + marker="x", + transform=ccrs.PlateCarree(), + label="Observation Point", + ) + + ax_map.set_extent([X.min(), X.max(), Y.min(), Y.max()], crs=ccrs.PlateCarree()) + ax_map.set_title("Bathymetry Map") + ax_map.legend(loc="upper left", fontsize="small") + time_vals = time.values + n_series = len(lon_obs) + ax_ts = gridspec.GridSpecFromSubplotSpec( + n_series, 1, subplot_spec=gs[0, 1], hspace=0.3 + ) + auto_limits = WLmin is None or WLmax is None + + axes_right = [] + for i in range(n_series): + ax_map.text( + lon_obs[i], + lat_obs[i], + f"Point {i + 1}", + color="k", + fontsize=10, + transform=ccrs.PlateCarree(), + ha="left", + va="bottom", + ) + ax = fig.add_subplot(ax_ts[i, 0]) + ax.plot( + time_vals, + WL_SS_dyn[:, i], + c="blue", + label=f"Dynamic simulation Point {i + 1}", + ) + ax.plot( + time_vals, WL_SS_GS[:, i], c="tomato", label=f"GreenSurge Point {i + 1}" + ) + ax.set_ylabel("Water level (m)") + ax.legend() + if i != n_series - 1: + ax.set_xticklabels([]) + if auto_limits: + WLmax = ( + max( + np.nanmax(WL_SS_dyn[:, i]), + np.nanmax(WL_SS_GS[:, i]), + np.nanmax(WL_GS[:, i]), + ) + * 1.05 + ) + WLmin = ( + min( + np.nanmin(WL_SS_dyn[:, i]), + np.nanmin(WL_SS_GS[:, i]), + np.nanmin(WL_GS[:, i]), + ) + * 1.05 + ) + ax.set_ylim(WLmin, WLmax) + ax.set_xlim(time_vals[0], time_vals[-1]) + axes_right.append(ax) + axes_right[0].set_title("Tide Gauge Validation") + + plt.tight_layout() + plt.show() + + +def build_additive_infos_dataset( + Nodes_calc, + Elmts_calc, + Nodes_forz, + Elmts_forz, + site, + wind_speed, + direction_step, + simulation_duration_hours, + simulation_time_step_hours, + forcing_time_step, + reference_date_dt, + Eddy, + Chezy, +): + """Build a structured dataset for GreenSurge hybrid modeling. 
+ + Parameters + ---------- + Nodes_calc : array + Computational mesh vertices (lon, lat) + Elmts_calc : array + Computational mesh triangles + Nodes_forz : array + Forcing mesh vertices (lon, lat) + Elmts_forz : array + Forcing mesh triangles + site : str + Study site name + wind_speed : float + Wind speed for each direction (m/s) + direction_step : float + Wind direction discretization step (degrees) + simulation_duration_hours : float + Total simulation duration (hours) + simulation_time_step_hours : float + Simulation time step (hours) + forcing_time_step : float + Forcing data time step (hours) + reference_date_dt : datetime + Reference date for simulation + Eddy : float + Eddy viscosity (m2/s) + Chezy : float + Chezy friction coefficient + + Returns + ------- + xr.Dataset + Structured dataset for GreenSurge + """ + num_elements = Elmts_forz.shape[0] + num_directions = int(360 / direction_step) + wind_directions = np.arange(0, 360, direction_step) + reference_date_str = reference_date_dt.strftime("%Y-%m-%d %H:%M:%S") + + time_forcing_index = [ + 0, + forcing_time_step, + forcing_time_step + 0.001, + simulation_duration_hours - 1, + ] + + ds = xr.Dataset( + coords=dict( + wind_direction_index=("wind_direction_index", np.arange(num_directions)), + time_forcing_index=("time_forcing_index", time_forcing_index), + node_computation_longitude=("node_cumputation_index", Nodes_calc[:, 0]), + node_computation_latitude=("node_cumputation_index", Nodes_calc[:, 1]), + triangle_nodes=("triangle_forcing_nodes", np.arange(3)), + node_forcing_index=("node_forcing_index", np.arange(len(Nodes_forz))), + element_forcing_index=("element_forcing_index", np.arange(num_elements)), + node_cumputation_index=( + "node_cumputation_index", + np.arange(len(Nodes_calc)), + ), + element_computation_index=( + "element_computation_index", + np.arange(len(Elmts_calc)), + ), + ), + data_vars=dict( + triangle_computation_connectivity=( + ("element_computation_index", "triangle_forcing_nodes"), + 
Elmts_calc.astype(int), + {"description": "Computational mesh triangle connectivity"}, + ), + node_forcing_longitude=( + "node_forcing_index", + Nodes_forz[:, 0], + {"units": "degrees_east", "description": "Forcing mesh node longitude"}, + ), + node_forcing_latitude=( + "node_forcing_index", + Nodes_forz[:, 1], + {"units": "degrees_north", "description": "Forcing mesh node latitude"}, + ), + triangle_forcing_connectivity=( + ("element_forcing_index", "triangle_forcing_nodes"), + Elmts_forz.astype(int), + {"description": "Forcing mesh triangle connectivity"}, + ), + wind_directions=( + "wind_direction_index", + wind_directions, + {"units": "degrees", "description": "Discretized wind directions"}, + ), + total_elements=( + (), + num_elements, + {"description": "Number of forcing elements"}, + ), + simulation_duration_hours=( + (), + simulation_duration_hours, + {"units": "hours"}, + ), + time_step_hours=((), simulation_time_step_hours, {"units": "hours"}), + wind_speed=((), wind_speed, {"units": "m/s"}), + location_name=((), site), + eddy_viscosity=((), Eddy, {"units": "m2/s"}), + chezy_coefficient=((), Chezy), + reference_date=((), reference_date_str), + forcing_time_step=((), forcing_time_step, {"units": "hour"}), + ), + attrs={ + "title": "GreenSurge Simulation Input Dataset", + "institution": "GeoOcean", + "model": "GreenSurge", + "created": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + }, + ) + + # Add coordinate attributes + ds["time_forcing_index"].attrs = { + "standard_name": "time", + "units": f"hours since {reference_date_str} +00:00", + "calendar": "gregorian", + } + ds["node_computation_longitude"].attrs = { + "standard_name": "longitude", + "units": "degrees_east", + } + ds["node_computation_latitude"].attrs = { + "standard_name": "latitude", + "units": "degrees_north", + } + + return ds + + +def point_to_segment_distance_vectorized( + px: np.ndarray, + py: np.ndarray, + ax: float, + ay: float, + bx: float, + by: float, +) -> np.ndarray: + """ + 
Compute vectorized distance from points (px, py) to segment [A, B]. + + Parameters + ---------- + px, py : np.ndarray + Arrays of point coordinates. + ax, ay : float + Coordinates of segment start point A. + bx, by : float + Coordinates of segment end point B. + + Returns + ------- + np.ndarray + Array of distances from each point to the segment. + """ + ab_x = bx - ax + ab_y = by - ay + ab_len_sq = ab_x**2 + ab_y**2 + + if ab_len_sq == 0: + return np.sqrt((px - ax) ** 2 + (py - ay) ** 2) + + t = np.clip(((px - ax) * ab_x + (py - ay) * ab_y) / ab_len_sq, 0, 1) + closest_x = ax + t * ab_x + closest_y = ay + t * ab_y + + return np.sqrt((px - closest_x) ** 2 + (py - closest_y) ** 2) + + +def generate_structured_points_vectorized( + triangle_connectivity: np.ndarray, + node_lon: np.ndarray, + node_lat: np.ndarray, +) -> Tuple[np.ndarray, np.ndarray]: + """ + Generate structured points for triangles (vectorized version, eliminates Python loop). + + Parameters + ---------- + triangle_connectivity : np.ndarray + Array of shape (n_triangles, 3) with vertex indices. + node_lon, node_lat : np.ndarray + Arrays of node coordinates. + + Returns + ------- + lon_all, lat_all : np.ndarray + Arrays of shape (n_triangles, 10) with structured point coordinates. 
+ """ + A_lon = node_lon[triangle_connectivity[:, 0]] + A_lat = node_lat[triangle_connectivity[:, 0]] + B_lon = node_lon[triangle_connectivity[:, 1]] + B_lat = node_lat[triangle_connectivity[:, 1]] + C_lon = node_lon[triangle_connectivity[:, 2]] + C_lat = node_lat[triangle_connectivity[:, 2]] + + G_lon = (A_lon + B_lon + C_lon) / 3 + G_lat = (A_lat + B_lat + C_lat) / 3 + + M_AB_lon, M_AB_lat = (A_lon + B_lon) / 2, (A_lat + B_lat) / 2 + M_BC_lon, M_BC_lat = (B_lon + C_lon) / 2, (B_lat + C_lat) / 2 + M_CA_lon, M_CA_lat = (C_lon + A_lon) / 2, (C_lat + A_lat) / 2 + M_AG_lon, M_AG_lat = (A_lon + G_lon) / 2, (A_lat + G_lat) / 2 + M_BG_lon, M_BG_lat = (B_lon + G_lon) / 2, (B_lat + G_lat) / 2 + M_CG_lon, M_CG_lat = (C_lon + G_lon) / 2, (C_lat + G_lat) / 2 + + lon_all = np.column_stack( + [ + A_lon, + B_lon, + C_lon, + G_lon, + M_AB_lon, + M_BC_lon, + M_CA_lon, + M_AG_lon, + M_BG_lon, + M_CG_lon, + ] + ) + lat_all = np.column_stack( + [ + A_lat, + B_lat, + C_lat, + G_lat, + M_AB_lat, + M_BC_lat, + M_CA_lat, + M_AG_lat, + M_BG_lat, + M_CG_lat, + ] + ) + + return lon_all, lat_all + + +def wind_partition_triangular(ds_GFD_info, xds_vortex): + """ + Interpolate vortex model data to triangle elements using GreenSurge wind partitioning. + Parameters + ---------- + ds_GFD_info : xr.Dataset + Dataset containing GreenSurge grid information. + xds_vortex : xr.Dataset + Dataset containing vortex model data. + + Returns + ------- + xr.Dataset + Dataset containing interpolated vortex model data at triangle elements. 
+ """ + element_forcing_index = ds_GFD_info.element_forcing_index.values + num_element = len(element_forcing_index) + triangle_forcing_connectivity = ds_GFD_info.triangle_forcing_connectivity.values + node_forcing_longitude = ds_GFD_info.node_forcing_longitude.values + node_forcing_latitude = ds_GFD_info.node_forcing_latitude.values + longitude_forcing_cells = node_forcing_longitude[triangle_forcing_connectivity] + latitude_forcing_cells = node_forcing_latitude[triangle_forcing_connectivity] + + # if np.abs(np.mean(lon_grid)-np.mean(lon_teselas))>180: + # lon_teselas = lon_teselas+360 + + # TC_info + time = xds_vortex.time.values + lon_grid = xds_vortex.lon.values + lat_grid = xds_vortex.lat.values + Ntime = len(time) + + # storage + U_tes = np.zeros((num_element, Ntime)) + V_tes = np.zeros((num_element, Ntime)) + p_tes = np.zeros((num_element, Ntime)) + Dir_tes = np.zeros((num_element, Ntime)) + Wspeed_tes = np.zeros((num_element, Ntime)) + + for i in range(Ntime): + W_grid = xds_vortex.W.values[:, :, i] + p_grid = xds_vortex.p.values[:, :, i] + Dir_grid = (270 - xds_vortex.Dir.values[:, :, i]) * np.pi / 180 + + u_sel_t = W_grid * np.cos(Dir_grid) + v_sel_t = W_grid * np.sin(Dir_grid) + + for element in element_forcing_index: + X0, X1, X2 = longitude_forcing_cells[element, :] + Y0, Y1, Y2 = latitude_forcing_cells[element, :] + + triangle = [(X0, Y0), (X1, Y1), (X2, Y2)] + + mask = create_triangle_mask(lon_grid, lat_grid, triangle) + + u_sel = u_sel_t[mask] + v_sel = v_sel_t[mask] + p_sel = p_grid[mask] + + p_mean = np.nanmean(p_sel) + u_mean = np.nanmean(u_sel) + v_mean = np.nanmean(v_sel) + + U_tes[element, i] = u_mean + V_tes[element, i] = v_mean + p_tes[element, i] = p_mean + + Dir_tes[element, i] = get_degrees_from_uv(-u_mean, -v_mean) + Wspeed_tes[element, i] = np.sqrt(u_mean**2 + v_mean**2) + + xds_vortex_interp = xr.Dataset( + data_vars={ + "Dir": (("element", "time"), Dir_tes), + "W": (("element", "time"), Wspeed_tes), + "p": (("element", "time"), p_tes), 
+ }, + coords={ + "element": element_forcing_index, + "time": time, + }, + ) + return xds_vortex_interp + + +def create_triangle_mask( + lon_grid: np.ndarray, lat_grid: np.ndarray, triangle: np.ndarray +) -> np.ndarray: + """ + Create a mask for a triangle defined by its vertices. + + Parameters + ---------- + lon_grid : np.ndarray + The longitude grid. + lat_grid : np.ndarray + The latitude grid. + triangle : np.ndarray + The triangle vertices. + + Returns + ------- + np.ndarray + The mask for the triangle. + """ + + triangle_path = Path(triangle) + lon_grid, lat_grid = np.meshgrid(lon_grid, lat_grid) + points = np.vstack([lon_grid.flatten(), lat_grid.flatten()]).T + inside_mask = triangle_path.contains_points(points) + mask = inside_mask.reshape(lon_grid.shape) + + return mask \ No newline at end of file diff --git a/bluemath_tk/additive/greensurge.py b/bluemath_tk/additive/greensurge.py index 4ccc775..8a6d920 100644 --- a/bluemath_tk/additive/greensurge.py +++ b/bluemath_tk/additive/greensurge.py @@ -1,1945 +1,9 @@ -import struct -import warnings -from datetime import datetime -from functools import lru_cache -from typing import List, Tuple - -import cartopy.crs as ccrs -import matplotlib.gridspec as gridspec -import matplotlib.pyplot as plt -import numpy as np -import xarray as xr -from matplotlib.axes import Axes -from matplotlib.figure import Figure -from matplotlib.path import Path -from tqdm import tqdm -from shapely.geometry import LineString, Point - -from ..core.operations import get_degrees_from_uv -from ..core.plotting.colors import hex_colors_land, hex_colors_water -from ..core.plotting.utils import join_colormaps - - -def add_forcing_edges_to_computational_domain(poly_clip, vert, tria): - """ - Add edges to the forcing mesh that align with the computational domain boundary. - Parameters - ---------- - poly_clip : shapely.geometry.Polygon - The polygon representing the computational domain boundary. 
- vert : np.ndarray - Array of shape (n_vertices, 2) containing the coordinates of the vertices in the forcing mesh. - tria : np.ndarray - Array of shape (n_triangles, 3) containing the indices of the vertices for each triangle in the forcing mesh. - Returns - ------- - vert_clean : np.ndarray - Updated array of vertices including new intersection points. - edges_clean : np.ndarray - Updated array of edges including new edges along the computational domain boundary. - """ - - # Extract all edges from forcing mesh triangles - edges_new_full = np.vstack([ - tria[:, [0, 1]], - tria[:, [1, 2]], - tria[:, [2, 0]] - ]) - edges_new = np.unique(np.sort(edges_new_full, axis=1), axis=0) - - # Check which vertices are inside computational domain - mask_node_in = np.array([poly_clip.contains(Point(x, y)) for x, y in vert]) - - # Classify edges by how many endpoints are inside - count_inside = mask_node_in[edges_new].sum(axis=1) - edges_both_inside = edges_new[count_inside == 2] - edges_one_inside = edges_new[count_inside == 1] - - # Clip edges at computational domain boundary - edges_one_inside_new = edges_one_inside.copy() - new_points = [] - - for i, (n1, n2) in enumerate(edges_one_inside): - p1, p2 = vert[n1], vert[n2] - inside1 = mask_node_in[n1] - - line = LineString([p1, p2]) - inter = line.intersection(poly_clip.boundary) - - if inter.is_empty: - continue - - # Handle multiple intersection points - if inter.geom_type == "MultiPoint": - points = list(inter.geoms) - ref = p1 if inside1 else p2 - dists = [Point(ref).distance(pt) for pt in points] - inter_pt = np.array(points[np.argmin(dists)].coords[0]) - else: - inter_pt = np.array(inter.coords[0]) - - new_index = len(vert) + len(new_points) - new_points.append(inter_pt) - - if inside1: - edges_one_inside_new[i, 1] = new_index - else: - edges_one_inside_new[i, 0] = new_index - - # Update vertices with new intersection points - if len(new_points) > 0: - vert_update = np.vstack([vert, np.array(new_points)]) - - # Clean up: 
keep only used nodes - edges_used = np.vstack([edges_both_inside, edges_one_inside_new]) - used_nodes = np.unique(edges_used.flatten()) - vert_clean = vert_update[used_nodes] - mapping = {old_idx: new_idx for new_idx, old_idx in enumerate(used_nodes)} - edges_clean = np.vectorize(mapping.get)(edges_used) - - return vert_clean, edges_clean - -def read_adcirc_grd(grd_file: str) -> Tuple[np.ndarray, np.ndarray, List[str]]: - """ - Reads the ADCIRC grid file and returns the node and element data. - - Parameters - ---------- - grd_file : str - Path to the ADCIRC grid file. - - Returns - ------- - Tuple[np.ndarray, np.ndarray, List[str]] - A tuple containing: - - Nodes (np.ndarray): An array of shape (nnodes, 3) containing the coordinates of each node. - - Elmts (np.ndarray): An array of shape (nelmts, 3) containing the element connectivity, - with node indices adjusted (decremented by 1). - - lines (List[str]): The remaining lines in the file after reading the nodes and elements. - - Examples - -------- - >>> nodes, elmts, lines = read_adcirc_grd("path/to/grid.grd") - >>> print(nodes.shape, elmts.shape, len(lines)) - (1000, 3) (500, 3) 10 - """ - - with open(grd_file, "r") as f: - f.readline() # Skip header line - header1 = f.readline() - header_nums = list(map(float, header1.split())) - nelmts = int(header_nums[0]) - nnodes = int(header_nums[1]) - - Nodes = np.loadtxt(f, max_rows=nnodes) - Elmts = np.loadtxt(f, max_rows=nelmts) - 1 - lines = f.readlines() - - return Nodes, Elmts, lines - - -def get_regular_grid( - node_computation_longitude: np.ndarray, - node_computation_latitude: np.ndarray, - node_computation_elements: np.ndarray, - factor: float = 10.0, - margin_deg: float = 0, -) -> Tuple[np.ndarray, np.ndarray]: - """ - Generate a regular lon/lat grid slightly larger than the bounds of the node coordinates. - Grid resolution is derived from the smallest element size scaled by a factor. 
- - Parameters - ---------- - node_computation_longitude : np.ndarray - 1D array of longitudes for the nodes. - node_computation_latitude : np.ndarray - 1D array of latitudes for the nodes. - node_computation_elements : np.ndarray - 2D array of indices defining the triangular elements. - factor : float, optional - Resolution scaling factor: higher means coarser grid. - margin_deg : float, optional - Margin to add (in degrees) to each side of the bounding box. - - Returns - ------- - lon_grid : np.ndarray - 1D array of longitudes defining the grid. - lat_grid : np.ndarray - 1D array of latitudes defining the grid. - """ - - # Bounding box with margin - lon_min = node_computation_longitude.min() - margin_deg - lon_max = node_computation_longitude.max() + margin_deg - lat_min = node_computation_latitude.min() - margin_deg - lat_max = node_computation_latitude.max() + margin_deg - - # Get triangle node coordinates - lon_tri = node_computation_longitude[node_computation_elements] - lat_tri = node_computation_latitude[node_computation_elements] - - # Estimate resolution from max side of each triangle - dlon01 = np.abs(lon_tri[:, 0] - lon_tri[:, 1]) - dlon12 = np.abs(lon_tri[:, 1] - lon_tri[:, 2]) - dlon20 = np.abs(lon_tri[:, 2] - lon_tri[:, 0]) - max_dlon = np.stack([dlon01, dlon12, dlon20], axis=1).max(axis=1) - min_dx = np.min(max_dlon) * factor - - dlat01 = np.abs(lat_tri[:, 0] - lat_tri[:, 1]) - dlat12 = np.abs(lat_tri[:, 1] - lat_tri[:, 2]) - dlat20 = np.abs(lat_tri[:, 2] - lat_tri[:, 0]) - max_dlat = np.stack([dlat01, dlat12, dlat20], axis=1).max(axis=1) - min_dy = np.min(max_dlat) * factor - - # Create regular grid - lon_grid = np.arange(lon_min, lon_max + min_dx, min_dx) - lat_grid = np.arange(lat_min, lat_max + min_dy, min_dy) - - return lon_grid, lat_grid - - -def generate_structured_points( - triangle_connectivity: np.ndarray, - node_lon: np.ndarray, - node_lat: np.ndarray, -) -> Tuple[np.ndarray, np.ndarray]: - """ - Generate structured points for each 
triangle in the mesh. - Each triangle will have 10 points: vertices, centroid, midpoints of edges, - and midpoints of vertex-centroid segments. - - Parameters - ---------- - triangle_connectivity : np.ndarray - Array of shape (n_triangles, 3) containing indices of the vertices for each triangle. - node_lon : np.ndarray - Array of shape (n_nodes,) containing the longitudes of the nodes. - node_lat : np.ndarray - Array of shape (n_nodes,) containing the latitudes of the nodes. - - Returns - ------- - lon_all : np.ndarray - Array of shape (n_triangles, 10) containing the longitudes of the structured points for each triangle. - lat_all : np.ndarray - Array of shape (n_triangles, 10) containing the latitudes of the structured points for each triangle. - """ - - n_tri = triangle_connectivity.shape[0] - lon_all = np.empty((n_tri, 10)) - lat_all = np.empty((n_tri, 10)) - - for i, tri in enumerate(triangle_connectivity): - A = np.array([node_lon[tri[0]], node_lat[tri[0]]]) - B = np.array([node_lon[tri[1]], node_lat[tri[1]]]) - C = np.array([node_lon[tri[2]], node_lat[tri[2]]]) - - G = (A + B + C) / 3 - M_AB = (A + B) / 2 - M_BC = (B + C) / 2 - M_CA = (C + A) / 2 - M_AG = (A + G) / 2 - M_BG = (B + G) / 2 - M_CG = (C + G) / 2 - - points = [A, B, C, G, M_AB, M_BC, M_CA, M_AG, M_BG, M_CG] - lon_all[i, :] = [pt[0] for pt in points] - lat_all[i, :] = [pt[1] for pt in points] - - return lon_all, lat_all - - -def plot_GS_input_wind_partition( - xds_vortex_GS: xr.Dataset, - xds_vortex_interp: xr.Dataset, - ds_GFD_info: xr.Dataset, - i_time: int = 0, - SWATH: bool = False, - figsize=(10, 8), -) -> None: - """ - Plot the wind partition for GreenSurge input data. - - Parameters - ---------- - xds_vortex_GS : xr.Dataset - Dataset containing the vortex model data for GreenSurge. - xds_vortex_interp : xr.Dataset - Dataset containing the interpolated vortex model data. - ds_GFD_info : xr.Dataset - Dataset containing the GreenSurge forcing information. 
- i_time : int, optional - Index of the time step to plot. Default is 0. - figsize : tuple, optional - Figure size. Default is (10, 8). - """ - - simple_quiver = 20 - scale = 30 - width = 0.003 - - fig, (ax1, ax2) = plt.subplots( - 1, - 2, - figsize=figsize, - subplot_kw={"projection": ccrs.PlateCarree()}, - constrained_layout=True, - ) - time = xds_vortex_GS.time.isel(time=i_time) - - ax1.set_title("Vortex wind") - ax2.set_title("Wind partition (GreenSurge)") - - triangle_forcing_connectivity = ds_GFD_info.triangle_forcing_connectivity.values - node_forcing_longitude = ds_GFD_info.node_forcing_longitude.values - node_forcing_latitude = ds_GFD_info.node_forcing_latitude.values - - Lon = xds_vortex_GS.lon - Lat = xds_vortex_GS.lat - if not SWATH: - W = xds_vortex_interp.W.isel(time=i_time) - Dir = (270 - xds_vortex_interp.Dir.isel(time=i_time)) % 360 - W_reg = xds_vortex_GS.W.isel(time=i_time) - Dir_reg = (270 - xds_vortex_GS.Dir.isel(time=i_time)) % 360 - fig.suptitle( - f"Wind partition for {time.values.astype('datetime64[s]').astype(str)}", - ) - else: - W = xds_vortex_interp.W.max(dim="time") - W_reg = xds_vortex_GS.W.max(dim="time") - fig.suptitle( - "Wind partition SWATH", - ) - vmin = np.min((W.min(), W_reg.min())) - vmax = np.max((W.max(), W_reg.max())) - - cmap = join_colormaps( - cmap1="viridis", - cmap2="plasma_r", - name="wind_partition_cmap", - range1=(0.2, 1.0), - range2=(0.05, 0.8), - ) - - ax2.tripcolor( - node_forcing_longitude, - node_forcing_latitude, - triangle_forcing_connectivity, - facecolors=W, - cmap=cmap, - vmin=vmin, - vmax=vmax, - edgecolor="white", - shading="flat", - transform=ccrs.PlateCarree(), - ) - - pm1 = ax1.pcolormesh( - Lon, - Lat, - W_reg, - cmap=cmap, - vmin=vmin, - vmax=vmax, - shading="auto", - transform=ccrs.PlateCarree(), - ) - cbar = fig.colorbar( - pm1, ax=(ax1, ax2), orientation="horizontal", pad=0.03, aspect=50 - ) - cbar.set_label( - "{0} ({1})".format("Wind", "m.s⁻¹"), - rotation=0, - va="bottom", - 
fontweight="bold", - labelpad=15, - ) - if not SWATH: - ax2.quiver( - np.mean(node_forcing_longitude[triangle_forcing_connectivity], axis=1), - np.mean(node_forcing_latitude[triangle_forcing_connectivity], axis=1), - np.cos(np.deg2rad(Dir)), - np.sin(np.deg2rad(Dir)), - color="black", - scale=scale, - width=width, - transform=ccrs.PlateCarree(), - ) - - ax1.quiver( - Lon[::simple_quiver], - Lat[::simple_quiver], - (np.cos(np.deg2rad(Dir_reg)))[::simple_quiver, ::simple_quiver], - (np.sin(np.deg2rad(Dir_reg)))[::simple_quiver, ::simple_quiver], - color="black", - scale=scale, - width=width, - transform=ccrs.PlateCarree(), - ) - - ax1.set_extent([Lon.min(), Lon.max(), Lat.min(), Lat.max()], crs=ccrs.PlateCarree()) - ax2.set_extent([Lon.min(), Lon.max(), Lat.min(), Lat.max()], crs=ccrs.PlateCarree()) - - ax1.coastlines() - ax2.coastlines() - - -def plot_greensurge_setup( - info_ds: xr.Dataset, figsize: tuple = (10, 10), fig: Figure = None, ax: Axes = None -) -> Tuple[Figure, Axes]: - """ - Plot the GreenSurge mesh setup from the provided dataset. - - Parameters - ---------- - info_ds : xr.Dataset - Dataset containing the mesh information. - figsize : tuple, optional - Figure size. Default is (10, 10). - fig : Figure, optional - Figure object to plot on. If None, a new figure will be created. - ax : Axes, optional - Axes object to plot on. If None, a new axes will be created. - - Returns - ------- - fig : Figure - Figure object. - ax : Axes - Axes object. 
- """ - - # Extract data from the dataset - connectivity = info_ds.triangle_forcing_connectivity.values - node_forcing_longitude = info_ds.node_forcing_longitude.values - node_forcing_latitude = info_ds.node_forcing_latitude.values - node_computation_longitude = info_ds.node_computation_longitude.values - node_computation_latitude = info_ds.node_computation_latitude.values - - num_elements = len(connectivity) - if fig is None or ax is None: - fig, ax = plt.subplots( - subplot_kw={"projection": ccrs.PlateCarree()}, - figsize=figsize, - constrained_layout=True, - ) - - ax.triplot( - node_computation_longitude, - node_computation_latitude, - info_ds.triangle_computation_connectivity.values, - color="grey", - linestyle="-", - marker="", - linewidth=0.5, - label="Computational mesh", - ) - ax.triplot( - node_forcing_longitude, - node_forcing_latitude, - connectivity, - color="green", - linestyle="-", - marker="", - linewidth=1, - label=f"Forcing mesh ({num_elements} elements)", - ) - - bnd = [ - min(node_computation_longitude.min(), node_forcing_longitude.min()), - max(node_computation_longitude.max(), node_forcing_longitude.max()), - min(node_computation_latitude.min(), node_forcing_latitude.min()), - max(node_computation_latitude.max(), node_forcing_latitude.max()), - ] - ax.set_extent([*bnd], crs=ccrs.PlateCarree()) - plt.legend(loc="lower left", fontsize=10, markerscale=2.0) - ax.set_title("GreenSurge Mesh Setup") - - return fig, ax - - -def create_triangle_mask_from_points( - lon: np.ndarray, lat: np.ndarray, triangle: np.ndarray -) -> np.ndarray: - """ - Create a mask indicating which scattered points are inside a triangle. - - Parameters - ---------- - lon : np.ndarray - 1D array of longitudes of the points. - lat : np.ndarray - 1D array of latitudes of the points. - triangle : np.ndarray - (3, 2) array containing the triangle vertices as (lon, lat) pairs. 
- - Returns - ------- - mask : np.ndarray - 1D boolean array of same length as lon/lat indicating points inside the triangle. - """ - - points = np.column_stack((lon, lat)) # Shape (N, 2) - triangle_path = Path(triangle) - mask = triangle_path.contains_points(points) - - return mask - - -def plot_GS_vs_dynamic_windsetup_swath( - ds_WL_GS_WindSetUp: xr.Dataset, - ds_WL_dynamic_WindSetUp: xr.Dataset, - ds_gfd_metadata: xr.Dataset, - vmin: float = None, - vmax: float = None, - figsize: tuple = (10, 8), -) -> None: - """ - Plot the GreenSurge and dynamic wind setup from the provided datasets. - - Parameters - ---------- - ds_WL_GS_WindSetUp : xr.Dataset - Dataset containing the GreenSurge wind setup data. - ds_WL_dynamic_WindSetUp : xr.Dataset - Dataset containing the dynamic wind setup data. - ds_gfd_metadata : xr.Dataset - Dataset containing the metadata for the GFD mesh. - vmin : float, optional - Minimum value for the color scale. Default is None. - vmax : float, optional - Maximum value for the color scale. Default is None. - figsize : tuple, optional - Figure size. Default is (10, 8). - Returns - ------- - fig : Figure - The figure object containing the plots. - axs : list of Axes - List of Axes objects for the two subplots. 
- """ - - warnings.filterwarnings("ignore", message="All-NaN slice encountered") - - X = ds_gfd_metadata.node_computation_longitude.values - Y = ds_gfd_metadata.node_computation_latitude.values - triangles = ds_gfd_metadata.triangle_computation_connectivity.values - - Longitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values - Latitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values - - xds_GS = np.nanmax(ds_WL_GS_WindSetUp["WL"].values, axis=0) - xds_DY = np.nanmax(ds_WL_dynamic_WindSetUp["mesh2d_s1"].values, axis=0) - - if vmin is None: - vmin = 0 - if vmax is None: - vmax = float(np.nanmax(xds_GS)) - - fig, axs = plt.subplots( - nrows=1, - ncols=2, - figsize=figsize, - subplot_kw={"projection": ccrs.PlateCarree()}, - constrained_layout=True, - ) - - axs[0].tripcolor( - Longitude_dynamic, - Latitude_dynamic, - ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values - 1, - facecolors=xds_DY, - cmap="CMRmap_r", - vmin=vmin, - vmax=vmax, - transform=ccrs.PlateCarree(), - ) - - pm = axs[1].tripcolor( - X, - Y, - triangles, - facecolors=xds_GS, - cmap="CMRmap_r", - vmin=vmin, - vmax=vmax, - transform=ccrs.PlateCarree(), - ) - cbar = fig.colorbar(pm, ax=axs, orientation="horizontal", pad=0.03, aspect=50) - cbar.set_label( - "WL ({})".format("m"), rotation=0, va="bottom", fontweight="bold", labelpad=15 - ) - fig.suptitle("SWATH", fontsize=18, fontweight="bold") - - axs[0].set_title("Dynamic Wind SetUp", fontsize=14) - axs[1].set_title("GreenSurge Wind SetUp", fontsize=14) - - lon_min = np.nanmin(Longitude_dynamic) - lon_max = np.nanmax(Longitude_dynamic) - lat_min = np.nanmin(Latitude_dynamic) - lat_max = np.nanmax(Latitude_dynamic) - - for ax in axs: - ax.set_extent([lon_min, lon_max, lat_min, lat_max]) - return fig, axs - - -def plot_GS_vs_dynamic_windsetup( - ds_WL_GS_WindSetUp: xr.Dataset, - ds_WL_dynamic_WindSetUp: xr.Dataset, - ds_gfd_metadata: xr.Dataset, - time: datetime, - vmin: float = None, - vmax: float = None, - figsize: tuple = (10, 8), -) -> None: 
- """ - Plot the GreenSurge and dynamic wind setup from the provided datasets. - - Parameters - ---------- - ds_WL_GS_WindSetUp: xr.Dataset - xarray Dataset containing the GreenSurge wind setup data. - ds_WL_dynamic_WindSetUp: xr.Dataset - xarray Dataset containing the dynamic wind setup data. - ds_gfd_metadata: xr.Dataset - xarray Dataset containing the metadata for the GFD mesh. - time: datetime.datetime - The time point at which to plot the data. - vmin: float, optional - Minimum value for the color scale. Default is None. - vmax: float, optional - Maximum value for the color scale. Default is None. - figsize: tuple, optional - Tuple specifying the figure size. Default is (10, 8). - """ - - warnings.filterwarnings("ignore", message="All-NaN slice encountered") - - X = ds_gfd_metadata.node_computation_longitude.values - Y = ds_gfd_metadata.node_computation_latitude.values - triangles = ds_gfd_metadata.triangle_computation_connectivity.values - - Longitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values - Latitude_dynamic = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values - - xds_GS = ds_WL_GS_WindSetUp["WL"].sel(time=time).values - xds_DY = ds_WL_dynamic_WindSetUp["mesh2d_s1"].sel(time=time).values - if vmin is None or vmax is None: - vmax = float(np.nanmax(xds_GS)) * 0.5 - vmin = -vmax - - fig, axs = plt.subplots( - nrows=1, - ncols=2, - figsize=figsize, - subplot_kw={"projection": ccrs.PlateCarree()}, - constrained_layout=True, - ) - - axs[0].tripcolor( - Longitude_dynamic, - Latitude_dynamic, - ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values - 1, - facecolors=xds_DY, - cmap="bwr", - vmin=vmin, - vmax=vmax, - transform=ccrs.PlateCarree(), - ) - - pm = axs[1].tripcolor( - X, - Y, - triangles, - facecolors=xds_GS, - cmap="bwr", - vmin=vmin, - vmax=vmax, - transform=ccrs.PlateCarree(), - ) - cbar = fig.colorbar(pm, ax=axs, orientation="horizontal", pad=0.03, aspect=50) - cbar.set_label( - "WL ({})".format("m"), rotation=0, va="bottom", fontweight="bold", 
labelpad=15 - ) - fig.suptitle( - f"Wind SetUp for {time.astype('datetime64[s]').astype(str)}", - fontsize=16, - fontweight="bold", - ) - - axs[0].set_title("Dynamic Wind SetUp") - axs[1].set_title("GreenSurge Wind SetUp") - - lon_min = np.nanmin(Longitude_dynamic) - lon_max = np.nanmax(Longitude_dynamic) - lat_min = np.nanmin(Latitude_dynamic) - lat_max = np.nanmax(Latitude_dynamic) - for ax in axs: - ax.set_extent([lon_min, lon_max, lat_min, lat_max]) - - -def plot_GS_TG_validation_timeseries( - ds_WL_GS_WindSetUp: xr.Dataset, - ds_WL_GS_IB: xr.Dataset, - ds_WL_dynamic_WindSetUp: xr.Dataset, - tide_gauge: xr.Dataset, - ds_GFD_info: xr.Dataset, - figsize: tuple = (20, 7), - WLmin: float = None, - WLmax: float = None, -) -> None: - """ - Plot a time series comparison of GreenSurge, dynamic wind setup, and tide gauge data with a bathymetry map. - - Parameters - ---------- - ds_WL_GS_WindSetUp : xr.Dataset - Dataset containing GreenSurge wind setup data with dimensions (nface, time). - ds_WL_GS_IB : xr.Dataset - Dataset containing inverse barometer data with dimensions (lat, lon, time). - ds_WL_dynamic_WindSetUp : xr.Dataset - Dataset containing dynamic wind setup data with dimensions (mesh2d_nFaces, time). - tide_gauge : xr.Dataset - Dataset containing tide gauge data with dimensions (time). - ds_GFD_info : xr.Dataset - Dataset containing grid information with longitude and latitude coordinates. - figsize : tuple, optional - Size of the figure for the plot. Default is (15, 7). - WLmin : float, optional - Minimum water level for the plot. Default is None. - WLmax : float, optional - Maximum water level for the plot. Default is None. 
- """ - - lon_obs = tide_gauge.lon.values - lat_obs = tide_gauge.lat.values - lon_obs = np.where(lon_obs > 180, lon_obs - 360, lon_obs) - - nface_index = extract_pos_nearest_points_tri(ds_GFD_info, lon_obs, lat_obs) - mesh2d_nFaces = extract_pos_nearest_points_tri( - ds_WL_dynamic_WindSetUp, lon_obs, lat_obs - ) - pos_lon_IB, pos_lat_IB = extract_pos_nearest_points(ds_WL_GS_IB, lon_obs, lat_obs) - - time = ds_WL_GS_WindSetUp.WL.time - ds_WL_dynamic_WindSetUp = ds_WL_dynamic_WindSetUp.sel(time=time) - ds_WL_GS_IB = ds_WL_GS_IB.interp(time=time) - - WL_GS = ds_WL_GS_WindSetUp.WL.sel(nface=nface_index).values.squeeze() - WL_dyn = ds_WL_dynamic_WindSetUp.mesh2d_s1.sel( - mesh2d_nFaces=mesh2d_nFaces - ).values.squeeze() - WL_IB = ds_WL_GS_IB.IB.values[pos_lat_IB, pos_lon_IB, :].squeeze() - WL_TG = tide_gauge.SS.values - - WL_SS_dyn = WL_dyn + WL_IB - WL_SS_GS = WL_GS + WL_IB - - fig = plt.figure(figsize=figsize) - gs = gridspec.GridSpec(1, 2, width_ratios=[1, 3], figure=fig) - - ax_map = fig.add_subplot(gs[0], projection=ccrs.PlateCarree()) - X = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values - Y = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values - triangles = ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values.astype(int) - 1 - Z = np.mean(ds_WL_dynamic_WindSetUp.mesh2d_node_z.values[triangles], axis=1) - vmin = np.nanmin(Z) - vmax = np.nanmax(Z) - - cmap, norm = join_colormaps( - cmap1=hex_colors_water, - cmap2=hex_colors_land, - value_range1=(vmin, 0.0), - value_range2=(0.0, vmax), - name="raster_cmap", - ) - ax_map.set_facecolor("#518134") - - ax_map.tripcolor( - X, - Y, - triangles, - facecolors=Z, - cmap=cmap, - norm=norm, - shading="flat", - transform=ccrs.PlateCarree(), - ) - - ax_map.scatter( - lon_obs, - lat_obs, - color="red", - marker="x", - transform=ccrs.PlateCarree(), - label="Tide Gauge", - ) - ax_map.set_extent([X.min(), X.max(), Y.min(), Y.max()], crs=ccrs.PlateCarree()) - ax_map.set_title("Bathymetry Map") - ax_map.legend(loc="upper right", fontsize="small") - 
- ax_ts = fig.add_subplot(gs[1]) - time_vals = time.values - ax_ts.plot(time_vals, WL_SS_dyn, c="blue", label="Dynamic simulation") - ax_ts.plot(time_vals, WL_SS_GS, c="tomato", label="GreenSurge") - ax_ts.plot(tide_gauge.time.values, WL_TG, c="green", label="Tide Gauge") - ax_ts.plot(time_vals, WL_GS, c="grey", label="GS WindSetup") - ax_ts.plot(time_vals, WL_IB, c="black", label="Inverse Barometer") - - if WLmin is None or WLmax is None: - WLmax = ( - max( - np.nanmax(WL_SS_dyn), - np.nanmax(WL_SS_GS), - np.nanmax(WL_TG), - np.nanmax(WL_GS), - ) - * 1.05 - ) - WLmin = ( - min( - np.nanmin(WL_SS_dyn), - np.nanmin(WL_SS_GS), - np.nanmin(WL_TG), - np.nanmin(WL_GS), - ) - * 1.05 - ) - - ax_ts.set_xlim(time_vals[0], time_vals[-1]) - ax_ts.set_ylim(WLmin, WLmax) - ax_ts.set_ylabel("Water Level (m)") - ax_ts.set_title("Tide Gauge Validation") - ax_ts.legend() - - plt.tight_layout() - plt.show() - - -def extract_pos_nearest_points_tri( - ds_mesh_info: xr.Dataset, lon_points: np.ndarray, lat_points: np.ndarray -) -> np.ndarray: - """ - Extract the nearest triangle index for given longitude and latitude points. - - Parameters - ---------- - ds_mesh_info : xr.Dataset - Dataset containing mesh information with longitude and latitude coordinates. - lon_points : np.ndarray - Array of longitudes for which to find the nearest triangle index. - lat_points : np.ndarray - Array of latitudes for which to find the nearest triangle index. - - Returns - ------- - np.ndarray - Array of nearest triangle indices corresponding to the input longitude and latitude points. 
- """ - - if "node_forcing_latitude" in ds_mesh_info.variables: - lon_mesh = ds_mesh_info.node_computation_longitude.values - lat_mesh = ds_mesh_info.node_computation_latitude.values - type_ds = 0 - else: - lon_mesh = ds_mesh_info.mesh2d_face_x.values - lat_mesh = ds_mesh_info.mesh2d_face_y.values - type_ds = 1 - - nface_index = [] - - for i in range(len(lon_points)): - lon = lon_points[i] - lat = lat_points[i] - - distances = np.sqrt((lon_mesh - lon) ** 2 + (lat_mesh - lat) ** 2) - min_idx = np.argmin(distances) - - if type_ds == 0: - nface_index.append( - ds_mesh_info.node_cumputation_index.values[min_idx].astype(int) - ) - elif type_ds == 1: - nface_index.append(ds_mesh_info.mesh2d_nFaces.values[min_idx].astype(int)) - - return nface_index - - -def extract_pos_nearest_points( - ds_mesh_info: xr.Dataset, lon_points: np.ndarray, lat_points: np.ndarray -) -> Tuple[np.ndarray, np.ndarray]: - """ - Extract the nearest point indices for given longitude and latitude points in a mesh dataset. - - Parameters - ---------- - ds_mesh_info : xr.Dataset - Dataset containing mesh information with longitude and latitude coordinates. - lon_points : np.ndarray - Array of longitudes for which to find the nearest point indices. - lat_points : np.ndarray - Array of latitudes for which to find the nearest point indices. - - Returns - ------- - pos_lon_points_mesh : np.ndarray - Array of longitude indices corresponding to the input longitude points in the mesh. - pos_lat_points_mesh : np.ndarray - Array of latitude indices corresponding to the input latitude points in the mesh. 
- """ - - lon_mesh = ds_mesh_info.lon.values - lat_mesh = ds_mesh_info.lat.values - - pos_lon_points_mesh = [] - pos_lat_points_mesh = [] - - for i in range(len(lon_points)): - lon = lon_points[i] - lat = lat_points[i] - - lat_index = np.nanargmin((lat - lat_mesh) ** 2) - lon_index = np.nanargmin((lon - lon_mesh) ** 2) - - pos_lon_points_mesh.append(lon_index.astype(int)) - pos_lat_points_mesh.append(lat_index.astype(int)) - - return pos_lon_points_mesh, pos_lat_points_mesh - - -def pressure_to_IB(xds_presure: xr.Dataset) -> xr.Dataset: - """ - Convert pressure data in a dataset to inverse barometer (IB) values. - - Parameters - ---------- - xds_presure : xr.Dataset - Dataset containing pressure data with dimensions (lat, lon, time). - - Returns - ------- - xr.Dataset - Dataset with an additional variable 'IB' representing the inverse barometer values. - """ - - p = xds_presure.p.values - IB = (101325 - p) / 10000 # Convert pressure (Pa) to inverse barometer (m) - - xds_presure_modified = xds_presure.copy() - xds_presure_modified["IB"] = (("lat", "lon", "time"), IB) - - return xds_presure_modified - - -def build_greensurge_infos_dataset( - path_grd_calc: str, - path_grd_forz: str, - site: str, - wind_speed: float, - direction_step: float, - simulation_duration_hours: float, - simulation_time_step_hours: float, - forcing_time_step: float, - reference_date_dt: datetime, - Eddy: float, - Chezy: float, -) -> xr.Dataset: - """ - Build a structured dataset containing simulation parameters for hybrid modeling. - - Parameters - ---------- - path_grd_calc : str - Path to the computational grid file. - path_grd_forz : str - Path to the forcing grid file. - site : str - Name of the case study location. - wind_speed : float - Wind speed for each discretized direction. - direction_step : float - Step size for wind direction discretization in degrees. - simulation_duration_hours : float - Total duration of the simulation in hours. 
- simulation_time_step_hours : float - Time step used in the simulation in hours. - forcing_time_step : float - Time step used for applying external forcing data in hours. - reference_date_dt : datetime - Reference start date of the simulation. - Eddy : float - Eddy viscosity used in the simulation in m²/s. - Chezy : float - Chezy coefficient used for bottom friction. - Returns - ------- - xr.Dataset - A structured dataset containing simulation parameters for hybrid modeling. - """ - - Nodes_calc, Elmts_calc, _ = read_adcirc_grd(path_grd_calc) - Nodes_forz, Elmts_forz, _ = read_adcirc_grd(path_grd_forz) - - num_elements = Elmts_forz.shape[0] - - triangle_node_indices = np.arange(3) - - num_directions = int(360 / direction_step) - wind_directions = np.arange(0, 360, direction_step) - wind_direction_indices = np.arange(0, num_directions) - - element_forcing_indices = np.arange(0, num_elements) - element_computation_indices = np.arange(0, len(Elmts_calc[:, 1])) - - node_forcing_indices = np.arange(0, len(Nodes_forz[:, 1])) - - time_forcing_index = [ - 0, - forcing_time_step, - forcing_time_step + 0.001, - simulation_duration_hours - 1, - ] - - node_cumputation_index = np.arange(0, len(Nodes_calc[:, 1])) - - reference_date_str = reference_date_dt.strftime("%Y-%m-%d %H:%M:%S") - - simulation_dataset = xr.Dataset( - coords=dict( - wind_direction_index=("wind_direction_index", wind_direction_indices), - time_forcing_index=("time_forcing_index", time_forcing_index), - node_computation_longitude=("node_cumputation_index", Nodes_calc[:, 1]), - node_computation_latitude=("node_cumputation_index", Nodes_calc[:, 2]), - triangle_nodes=("triangle_forcing_nodes", triangle_node_indices), - node_forcing_index=("node_forcing_index", node_forcing_indices), - element_forcing_index=("element_forcing_index", element_forcing_indices), - node_cumputation_index=("node_cumputation_index", node_cumputation_index), - element_computation_index=( - "element_computation_index", - 
element_computation_indices, - ), - ), - data_vars=dict( - triangle_computation_connectivity=( - ("element_computation_index", "triangle_forcing_nodes"), - Elmts_calc[:, 2:5].astype(int), - { - "description": "Indices of nodes forming each triangular element of the computational grid (counter-clockwise order)" - }, - ), - node_forcing_longitude=( - "node_forcing_index", - Nodes_forz[:, 1], - { - "units": "degrees_east", - "description": "Longitude of each mesh node of the forcing grid", - }, - ), - node_forcing_latitude=( - "node_forcing_index", - Nodes_forz[:, 2], - { - "units": "degrees_north", - "description": "Latitude of each mesh node of the forcing grid", - }, - ), - triangle_forcing_connectivity=( - ("element_forcing_index", "triangle_forcing_nodes"), - Elmts_forz[:, 2:5].astype(int), - { - "description": "Indices of nodes forming each triangular element of the forcing grid (counter-clockwise order)" - }, - ), - wind_directions=( - "wind_direction_index", - wind_directions, - { - "units": "degrees", - "description": "Discretized wind directions (0 to 360°)", - }, - ), - total_elements=( - (), - num_elements, - {"description": "Total number of triangular elements in the mesh"}, - ), - simulation_duration_hours=( - (), - simulation_duration_hours, - {"units": "hours", "description": "Total duration of the simulation"}, - ), - time_step_hours=( - (), - simulation_time_step_hours, - {"units": "hours", "description": "Time step used in the simulation"}, - ), - wind_speed=( - (), - wind_speed, - { - "units": "m/s", - "description": "Wind speed for each discretized direction", - }, - ), - location_name=((), site, {"description": "Name of case study location"}), - eddy_viscosity=( - (), - Eddy, - { - "units": "m²/s", - "description": "Eddy viscosity used in the simulation", - }, - ), - chezy_coefficient=( - (), - Chezy, - {"description": "Chezy coefficient used for bottom friction"}, - ), - reference_date=( - (), - reference_date_str, - {"description": "Reference 
start date of the simulation"}, - ), - forcing_time_step=( - (), - forcing_time_step, - { - "units": "hour", - "description": "Time step used for applying external forcing data", - }, - ), - ), - ) - - simulation_dataset["time_forcing_index"].attrs = { - "standard_name": "time", - "long_name": f"Time - hours since {reference_date_str} +00:00", - "time_origin": f"{reference_date_str}", - "units": f"hours since {reference_date_str} +00:00", - "calendar": "gregorian", - "description": "Time definition for the forcing data", - } - - simulation_dataset["node_computation_longitude"].attrs = { - "description": "Longitude of each mesh node of the computational grid", - "standard_name": "longitude", - "long_name": "longitude", - "units": "degrees_east", - } - simulation_dataset["node_computation_latitude"].attrs = { - "description": "Latitude of each mesh node of the computational grid", - "standard_name": "latitude", - "long_name": "latitude", - "units": "degrees_north", - } - - simulation_dataset.attrs = { - "title": "Hybrid Simulation Input Dataset", - "description": "Structured dataset containing simulation parameters for hybrid modeling.", - "created": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), - "institution": "GeoOcean", - "model": "GreenSurge", - } - return simulation_dataset - - -def plot_greensurge_setup_with_raster( - simulation_dataset, - path_grd_calc, - figsize=(7, 7), -) -> None: - """ - Plot the GreenSurge setup with raster bathymetry. - - Parameters - ---------- - simulation_dataset : xr.Dataset - Dataset containing simulation information. - path_grd_calc : str - Path to the ADCIRC grid file for calculation. - figsize : tuple, optional - Size of the figure, by default (7, 7) - Returns - ------- - None - ----------- - This function plots the GreenSurge setup using raster bathymetry - and the ADCIRC grid for calculation. It uses Cartopy for geographic - projections and matplotlib for plotting. 
- """ - - Nodes_calc, Elmts_calc, _ = read_adcirc_grd(path_grd_calc) - - fig, ax = plt.subplots( - subplot_kw={"projection": ccrs.PlateCarree()}, - figsize=figsize, - constrained_layout=True, - ) - - Longitude_nodes_calc = Nodes_calc[:, 1] - Latitude_nodes_calc = Nodes_calc[:, 2] - Elements_calc = Elmts_calc[:, 2:5].astype(int) - depth = -np.mean(Nodes_calc[Elements_calc, 3], axis=1) - - vmin = np.nanmin(depth) - vmax = np.nanmax(depth) - - cmap, norm = join_colormaps( - cmap1=hex_colors_water, - cmap2=hex_colors_land, - value_range1=(vmin, 0.0), - value_range2=(0.0, vmax), - name="raster_cmap", - ) - - tpc = ax.tripcolor( - Longitude_nodes_calc, - Latitude_nodes_calc, - Elements_calc, - facecolors=depth, - cmap=cmap, - norm=norm, - shading="flat", - transform=ccrs.PlateCarree(), - ) - cbar = plt.colorbar(tpc, ax=ax) - cbar.set_label("Depth (m)") - - plot_greensurge_setup(simulation_dataset, figsize=(7, 7), ax=ax, fig=fig) - - -def interp_vortex_to_triangles( - xds_vortex_GS: xr.Dataset, - lon_all: np.ndarray, - lat_all: np.ndarray, - method: str = "tri_mean", -) -> xr.Dataset: - """ - Interpolate vortex model data to triangle points. - - Parameters - ---------- - xds_vortex_GS : xr.Dataset - Dataset containing the vortex model data. - lon_all : np.ndarray - Longitudes of the triangle points. - lat_all : np.ndarray - Latitudes of the triangle points. - method : str, optional - Interpolation method: "tri_mean" (default) or "tri_points". - - Returns - ------- - xr.Dataset - Dataset containing the interpolated vortex model data at the triangle points. - - Notes - ----- - This function interpolates vortex model data (wind speed, direction, and pressure) - to the triangle points defined by `lon_all` and `lat_all`. It reshapes the data - to match the number of triangles and points, and computes the mean values for each triangle. 
- """ - if method == "tri_mean": - n_tri, n_pts = lat_all.shape - lat_interp = lat_all.reshape(-1) - lon_interp = lon_all.reshape(-1) - elif method == "tri_points": - n_tri = lat_all.shape - lat_interp = lat_all - lon_interp = lon_all - - lat_interp = xr.DataArray(lat_interp, dims="point") - lon_interp = xr.DataArray(lon_interp, dims="point") - - if method == "tri_mean": - W_interp = xds_vortex_GS.W.interp(lat=lat_interp, lon=lon_interp) - Dir_interp = xds_vortex_GS.Dir.interp(lat=lat_interp, lon=lon_interp) - p_interp = xds_vortex_GS.p.interp(lat=lat_interp, lon=lon_interp) - - W_interp = W_interp.values.reshape(n_tri, n_pts, -1) - Dir_interp = Dir_interp.values.reshape(n_tri, n_pts, -1) - p_interp = p_interp.values.reshape(n_tri, n_pts, -1) - - theta_rad = np.deg2rad(Dir_interp) - u = np.cos(theta_rad) - v = np.sin(theta_rad) - u_mean = u.mean(axis=1) - v_mean = v.mean(axis=1) - Dir_out = (np.rad2deg(np.arctan2(v_mean, u_mean))) % 360 - W_out = W_interp.mean(axis=1) - p_out = p_interp.mean(axis=1) - elif method == "tri_points": - return xds_vortex_GS.interp(lat=lat_interp, lon=lon_interp) - - xds_vortex_interp = xr.Dataset( - data_vars={ - "W": (("element", "time"), W_out), - "Dir": (("element", "time"), Dir_out), - "p": (("element", "time"), p_out), - }, - coords={"time": xds_vortex_GS.time.values, "element": np.arange(n_tri)}, - ) - - return xds_vortex_interp - - -def plot_GS_validation_timeseries( - ds_WL_GS_WindSetUp: xr.Dataset, - ds_WL_GS_IB: xr.Dataset, - ds_WL_dynamic_WindSetUp: xr.Dataset, - ds_GFD_info: xr.Dataset, - lon_obs: float = [184.8], - lat_obs: float = [-21.14], - figsize: tuple = (20, 7), - WLmin: float = None, - WLmax: float = None, -) -> None: - """ - Plot a time series comparison of GreenSurge, dynamic wind setup, and tide gauge data with a bathymetry map. - - Parameters - ---------- - ds_WL_GS_WindSetUp : xr.Dataset - Dataset containing GreenSurge wind setup data with dimensions (nface, time). 
- ds_WL_GS_IB : xr.Dataset - Dataset containing inverse barometer data with dimensions (lat, lon, time). - ds_WL_dynamic_WindSetUp : xr.Dataset - Dataset containing dynamic wind setup data with dimensions (mesh2d_nFaces, time). - ds_GFD_info : xr.Dataset - Dataset containing grid information with longitude and latitude coordinates. - figsize : tuple, optional - Size of the figure for the plot. Default is (15, 7). - WLmin : float, optional - Minimum water level for the plot. Default is None. - WLmax : float, optional - Maximum water level for the plot. Default is None. - """ - - lon_obs = [lon - 360 if lon > 180 else lon for lon in lon_obs] - - nface_index = extract_pos_nearest_points_tri(ds_GFD_info, lon_obs, lat_obs) - mesh2d_nFaces = extract_pos_nearest_points_tri( - ds_WL_dynamic_WindSetUp, lon_obs, lat_obs - ) - pos_lon_IB, pos_lat_IB = extract_pos_nearest_points(ds_WL_GS_IB, lon_obs, lat_obs) - - time = ds_WL_GS_WindSetUp.WL.time - ds_WL_dynamic_WindSetUp = ds_WL_dynamic_WindSetUp.sel(time=time) - ds_WL_GS_IB = ds_WL_GS_IB.interp(time=time) - - WL_GS = ds_WL_GS_WindSetUp.WL.sel(nface=nface_index).values - WL_dyn = ds_WL_dynamic_WindSetUp.mesh2d_s1.sel(mesh2d_nFaces=mesh2d_nFaces).values - WL_IB = ds_WL_GS_IB.IB.values[pos_lat_IB, pos_lon_IB, :].T - - WL_SS_dyn = WL_dyn + WL_IB - WL_SS_GS = WL_GS + WL_IB - - fig = plt.figure(figsize=figsize) - gs = gridspec.GridSpec(1, 2, width_ratios=[1, 3], figure=fig) - - ax_map = fig.add_subplot(gs[0], projection=ccrs.PlateCarree()) - X = ds_WL_dynamic_WindSetUp.mesh2d_node_x.values - Y = ds_WL_dynamic_WindSetUp.mesh2d_node_y.values - triangles = ds_WL_dynamic_WindSetUp.mesh2d_face_nodes.values.astype(int) - 1 - Z = np.mean(ds_WL_dynamic_WindSetUp.mesh2d_node_z.values[triangles], axis=1) - vmin = np.nanmin(Z) - vmax = np.nanmax(Z) - - cmap, norm = join_colormaps( - cmap1=hex_colors_water, - cmap2=hex_colors_land, - value_range1=(vmin, 0.0), - value_range2=(0.0, vmax), - name="raster_cmap", - ) - 
ax_map.set_facecolor("#518134") - - ax_map.tripcolor( - X, - Y, - triangles, - facecolors=Z, - cmap=cmap, - norm=norm, - shading="flat", - transform=ccrs.PlateCarree(), - ) - - ax_map.scatter( - lon_obs, - lat_obs, - color="red", - marker="x", - transform=ccrs.PlateCarree(), - label="Observation Point", - ) - - ax_map.set_extent([X.min(), X.max(), Y.min(), Y.max()], crs=ccrs.PlateCarree()) - ax_map.set_title("Bathymetry Map") - ax_map.legend(loc="upper left", fontsize="small") - time_vals = time.values - n_series = len(lon_obs) - ax_ts = gridspec.GridSpecFromSubplotSpec( - n_series, 1, subplot_spec=gs[0, 1], hspace=0.3 - ) - auto_limits = WLmin is None or WLmax is None - - axes_right = [] - for i in range(n_series): - ax_map.text( - lon_obs[i], - lat_obs[i], - f"Point {i + 1}", - color="k", - fontsize=10, - transform=ccrs.PlateCarree(), - ha="left", - va="bottom", - ) - ax = fig.add_subplot(ax_ts[i, 0]) - ax.plot( - time_vals, - WL_SS_dyn[:, i], - c="blue", - label=f"Dynamic simulation Point {i + 1}", - ) - ax.plot( - time_vals, WL_SS_GS[:, i], c="tomato", label=f"GreenSurge Point {i + 1}" - ) - ax.set_ylabel("Water level (m)") - ax.legend() - if i != n_series - 1: - ax.set_xticklabels([]) - if auto_limits: - WLmax = ( - max( - np.nanmax(WL_SS_dyn[:, i]), - np.nanmax(WL_SS_GS[:, i]), - np.nanmax(WL_GS[:, i]), - ) - * 1.05 - ) - WLmin = ( - min( - np.nanmin(WL_SS_dyn[:, i]), - np.nanmin(WL_SS_GS[:, i]), - np.nanmin(WL_GS[:, i]), - ) - * 1.05 - ) - ax.set_ylim(WLmin, WLmax) - ax.set_xlim(time_vals[0], time_vals[-1]) - axes_right.append(ax) - axes_right[0].set_title("Tide Gauge Validation") - - plt.tight_layout() - plt.show() - - -@lru_cache(maxsize=256) -def read_raw_with_header(raw_path: str) -> np.ndarray: - """ - Read a .raw file with a 256-byte header and return a numpy float32 array. - - Parameters - ---------- - raw_path : str - Path to the .raw file. - - Returns - ------- - np.ndarray - The data array reshaped according to the header dimensions. 
- """ - with open(raw_path, "rb") as f: - header_bytes = f.read(256) - dims = list(struct.unpack("4i", header_bytes[:16])) - dims = [d for d in dims if d > 0] - if len(dims) == 0: - raise ValueError(f"{raw_path}: invalid header, no dimension > 0 found") - data = np.fromfile(f, dtype=np.float32) - expected_size = np.prod(dims) - if data.size != expected_size: - raise ValueError( - f"{raw_path}: size mismatch (data={data.size}, expected={expected_size}, shape={dims})" - ) - return np.reshape(data, dims) - - -def greensurge_wind_setup_reconstruction_raw( - greensurge_dataset: str, - ds_GFD_info_update: xr.Dataset, - xds_vortex_interp: xr.Dataset, -) -> xr.Dataset: - """ - Compute the GreenSurge wind contribution and return an xarray Dataset with the results. - - Parameters - ---------- - greensurge_dataset : str - Path to the GreenSurge dataset directory. - ds_GFD_info_update : xr.Dataset - Updated GreenSurge information dataset. - xds_vortex_interp : xr.Dataset - Interpolated vortex dataset. - - Returns - ------- - xr.Dataset - Dataset containing the GreenSurge wind setup contribution. 
- """ - - ds_gfd_metadata = ds_GFD_info_update - wind_direction_input = xds_vortex_interp - velocity_thresholds = np.array([0, 100, 100]) - drag_coefficients = np.array([0.00063, 0.00723, 0.00723]) - - direction_bins = ds_gfd_metadata.wind_directions.values - forcing_cell_indices = ds_gfd_metadata.element_forcing_index.values - wind_speed_reference = ds_gfd_metadata.wind_speed.values.item() - base_drag_coeff = GS_LinearWindDragCoef( - wind_speed_reference, drag_coefficients, velocity_thresholds - ) - time_step_hours = ds_gfd_metadata.time_step_hours.values - - time_start = wind_direction_input.time.values.min() - time_end = wind_direction_input.time.values.max() - duration_in_steps = ( - int((ds_gfd_metadata.simulation_duration_hours.values) / time_step_hours) + 1 - ) - output_time_vector = np.arange( - time_start, time_end, np.timedelta64(int(time_step_hours * 60), "m") - ) - num_output_times = len(output_time_vector) - - direction_data = wind_direction_input.Dir.values - wind_speed_data = wind_direction_input.W.values - - sample_path = ( - f"{greensurge_dataset}/GF_T_0_D_0/dflowfmoutput/GreenSurge_GFDcase_map.raw" - ) - sample_data = read_raw_with_header(sample_path) - n_faces = sample_data.shape[-1] - wind_setup_output = np.zeros((num_output_times, n_faces), dtype=np.float32) - water_level_accumulator = np.zeros(sample_data.shape, dtype=np.float32) - - for time_index in tqdm(range(num_output_times), desc="Processing time steps"): - water_level_accumulator[:] = 0 - for cell_index in forcing_cell_indices.astype(int): - current_dir = direction_data[cell_index, time_index] % 360 - adjusted_bins = np.where(direction_bins == 0, 360, direction_bins) - closest_direction_index = np.abs(adjusted_bins - current_dir).argmin() - - raw_path = f"{greensurge_dataset}/GF_T_{cell_index}_D_{closest_direction_index}/dflowfmoutput/GreenSurge_GFDcase_map.raw" - water_level_case = read_raw_with_header(raw_path) - - water_level_case = np.nan_to_num(water_level_case, nan=0) - - 
wind_speed_value = wind_speed_data[cell_index, time_index] - drag_coeff_value = GS_LinearWindDragCoef( - wind_speed_value, drag_coefficients, velocity_thresholds - ) - - scaling_factor = (wind_speed_value**2 / wind_speed_reference**2) * ( - drag_coeff_value / base_drag_coeff - ) - water_level_accumulator += water_level_case * scaling_factor - - step_window = min(duration_in_steps, num_output_times - time_index) - if (num_output_times - time_index) > step_window: - wind_setup_output[time_index : time_index + step_window] += ( - water_level_accumulator - ) - else: - shift_counter = step_window - (num_output_times - time_index) - wind_setup_output[ - time_index : time_index + step_window - shift_counter - ] += water_level_accumulator[: step_window - shift_counter] - - ds_wind_setup = xr.Dataset( - {"WL": (["time", "nface"], wind_setup_output)}, - coords={ - "time": output_time_vector, - "nface": np.arange(wind_setup_output.shape[1]), - }, - ) - return ds_wind_setup - - -def build_greensurge_infos_dataset_pymesh2d( - Nodes_calc, - Elmts_calc, - Nodes_forz, - Elmts_forz, - site, - wind_speed, - direction_step, - simulation_duration_hours, - simulation_time_step_hours, - forcing_time_step, - reference_date_dt, - Eddy, - Chezy, -): - """Build a structured dataset for GreenSurge hybrid modeling. 
- - Parameters - ---------- - Nodes_calc : array - Computational mesh vertices (lon, lat) - Elmts_calc : array - Computational mesh triangles - Nodes_forz : array - Forcing mesh vertices (lon, lat) - Elmts_forz : array - Forcing mesh triangles - site : str - Study site name - wind_speed : float - Wind speed for each direction (m/s) - direction_step : float - Wind direction discretization step (degrees) - simulation_duration_hours : float - Total simulation duration (hours) - simulation_time_step_hours : float - Simulation time step (hours) - forcing_time_step : float - Forcing data time step (hours) - reference_date_dt : datetime - Reference date for simulation - Eddy : float - Eddy viscosity (m2/s) - Chezy : float - Chezy friction coefficient - - Returns - ------- - xr.Dataset - Structured dataset for GreenSurge - """ - num_elements = Elmts_forz.shape[0] - num_directions = int(360 / direction_step) - wind_directions = np.arange(0, 360, direction_step) - reference_date_str = reference_date_dt.strftime("%Y-%m-%d %H:%M:%S") - - time_forcing_index = [ - 0, - forcing_time_step, - forcing_time_step + 0.001, - simulation_duration_hours - 1, - ] - - ds = xr.Dataset( - coords=dict( - wind_direction_index=("wind_direction_index", np.arange(num_directions)), - time_forcing_index=("time_forcing_index", time_forcing_index), - node_computation_longitude=("node_cumputation_index", Nodes_calc[:, 0]), - node_computation_latitude=("node_cumputation_index", Nodes_calc[:, 1]), - triangle_nodes=("triangle_forcing_nodes", np.arange(3)), - node_forcing_index=("node_forcing_index", np.arange(len(Nodes_forz))), - element_forcing_index=("element_forcing_index", np.arange(num_elements)), - node_cumputation_index=( - "node_cumputation_index", - np.arange(len(Nodes_calc)), - ), - element_computation_index=( - "element_computation_index", - np.arange(len(Elmts_calc)), - ), - ), - data_vars=dict( - triangle_computation_connectivity=( - ("element_computation_index", "triangle_forcing_nodes"), - 
Elmts_calc.astype(int), - {"description": "Computational mesh triangle connectivity"}, - ), - node_forcing_longitude=( - "node_forcing_index", - Nodes_forz[:, 0], - {"units": "degrees_east", "description": "Forcing mesh node longitude"}, - ), - node_forcing_latitude=( - "node_forcing_index", - Nodes_forz[:, 1], - {"units": "degrees_north", "description": "Forcing mesh node latitude"}, - ), - triangle_forcing_connectivity=( - ("element_forcing_index", "triangle_forcing_nodes"), - Elmts_forz.astype(int), - {"description": "Forcing mesh triangle connectivity"}, - ), - wind_directions=( - "wind_direction_index", - wind_directions, - {"units": "degrees", "description": "Discretized wind directions"}, - ), - total_elements=( - (), - num_elements, - {"description": "Number of forcing elements"}, - ), - simulation_duration_hours=( - (), - simulation_duration_hours, - {"units": "hours"}, - ), - time_step_hours=((), simulation_time_step_hours, {"units": "hours"}), - wind_speed=((), wind_speed, {"units": "m/s"}), - location_name=((), site), - eddy_viscosity=((), Eddy, {"units": "m2/s"}), - chezy_coefficient=((), Chezy), - reference_date=((), reference_date_str), - forcing_time_step=((), forcing_time_step, {"units": "hour"}), - ), - attrs={ - "title": "GreenSurge Simulation Input Dataset", - "institution": "GeoOcean", - "model": "GreenSurge", - "created": datetime.now().strftime("%Y-%m-%d %H:%M:%S"), - }, - ) - - # Add coordinate attributes - ds["time_forcing_index"].attrs = { - "standard_name": "time", - "units": f"hours since {reference_date_str} +00:00", - "calendar": "gregorian", - } - ds["node_computation_longitude"].attrs = { - "standard_name": "longitude", - "units": "degrees_east", - } - ds["node_computation_latitude"].attrs = { - "standard_name": "latitude", - "units": "degrees_north", - } - - return ds - - -def point_to_segment_distance_vectorized( - px: np.ndarray, - py: np.ndarray, - ax: float, - ay: float, - bx: float, - by: float, -) -> np.ndarray: - """ - 
Compute vectorized distance from points (px, py) to segment [A, B]. - - Parameters - ---------- - px, py : np.ndarray - Arrays of point coordinates. - ax, ay : float - Coordinates of segment start point A. - bx, by : float - Coordinates of segment end point B. - - Returns - ------- - np.ndarray - Array of distances from each point to the segment. - """ - ab_x = bx - ax - ab_y = by - ay - ab_len_sq = ab_x**2 + ab_y**2 - - if ab_len_sq == 0: - return np.sqrt((px - ax) ** 2 + (py - ay) ** 2) - - t = np.clip(((px - ax) * ab_x + (py - ay) * ab_y) / ab_len_sq, 0, 1) - closest_x = ax + t * ab_x - closest_y = ay + t * ab_y - - return np.sqrt((px - closest_x) ** 2 + (py - closest_y) ** 2) - - -def generate_structured_points_vectorized( - triangle_connectivity: np.ndarray, - node_lon: np.ndarray, - node_lat: np.ndarray, -) -> Tuple[np.ndarray, np.ndarray]: - """ - Generate structured points for triangles (vectorized version, eliminates Python loop). - - Parameters - ---------- - triangle_connectivity : np.ndarray - Array of shape (n_triangles, 3) with vertex indices. - node_lon, node_lat : np.ndarray - Arrays of node coordinates. - - Returns - ------- - lon_all, lat_all : np.ndarray - Arrays of shape (n_triangles, 10) with structured point coordinates. 
- """ - A_lon = node_lon[triangle_connectivity[:, 0]] - A_lat = node_lat[triangle_connectivity[:, 0]] - B_lon = node_lon[triangle_connectivity[:, 1]] - B_lat = node_lat[triangle_connectivity[:, 1]] - C_lon = node_lon[triangle_connectivity[:, 2]] - C_lat = node_lat[triangle_connectivity[:, 2]] - - G_lon = (A_lon + B_lon + C_lon) / 3 - G_lat = (A_lat + B_lat + C_lat) / 3 - - M_AB_lon, M_AB_lat = (A_lon + B_lon) / 2, (A_lat + B_lat) / 2 - M_BC_lon, M_BC_lat = (B_lon + C_lon) / 2, (B_lat + C_lat) / 2 - M_CA_lon, M_CA_lat = (C_lon + A_lon) / 2, (C_lat + A_lat) / 2 - M_AG_lon, M_AG_lat = (A_lon + G_lon) / 2, (A_lat + G_lat) / 2 - M_BG_lon, M_BG_lat = (B_lon + G_lon) / 2, (B_lat + G_lat) / 2 - M_CG_lon, M_CG_lat = (C_lon + G_lon) / 2, (C_lat + G_lat) / 2 - - lon_all = np.column_stack( - [ - A_lon, - B_lon, - C_lon, - G_lon, - M_AB_lon, - M_BC_lon, - M_CA_lon, - M_AG_lon, - M_BG_lon, - M_CG_lon, - ] - ) - lat_all = np.column_stack( - [ - A_lat, - B_lat, - C_lat, - G_lat, - M_AB_lat, - M_BC_lat, - M_CA_lat, - M_AG_lat, - M_BG_lat, - M_CG_lat, - ] - ) - - return lon_all, lat_all - - -def GS_wind_partition_tri(ds_GFD_info, xds_vortex): - """ - Interpolate vortex model data to triangle elements using GreenSurge wind partitioning. - Parameters - ---------- - ds_GFD_info : xr.Dataset - Dataset containing GreenSurge grid information. - xds_vortex : xr.Dataset - Dataset containing vortex model data. - - Returns - ------- - xr.Dataset - Dataset containing interpolated vortex model data at triangle elements. 
- """ - element_forcing_index = ds_GFD_info.element_forcing_index.values - num_element = len(element_forcing_index) - triangle_forcing_connectivity = ds_GFD_info.triangle_forcing_connectivity.values - node_forcing_longitude = ds_GFD_info.node_forcing_longitude.values - node_forcing_latitude = ds_GFD_info.node_forcing_latitude.values - longitude_forcing_cells = node_forcing_longitude[triangle_forcing_connectivity] - latitude_forcing_cells = node_forcing_latitude[triangle_forcing_connectivity] - - # if np.abs(np.mean(lon_grid)-np.mean(lon_teselas))>180: - # lon_teselas = lon_teselas+360 - - # TC_info - time = xds_vortex.time.values - lon_grid = xds_vortex.lon.values - lat_grid = xds_vortex.lat.values - Ntime = len(time) - - # storage - U_tes = np.zeros((num_element, Ntime)) - V_tes = np.zeros((num_element, Ntime)) - p_tes = np.zeros((num_element, Ntime)) - Dir_tes = np.zeros((num_element, Ntime)) - Wspeed_tes = np.zeros((num_element, Ntime)) - - for i in range(Ntime): - W_grid = xds_vortex.W.values[:, :, i] - p_grid = xds_vortex.p.values[:, :, i] - Dir_grid = (270 - xds_vortex.Dir.values[:, :, i]) * np.pi / 180 - - u_sel_t = W_grid * np.cos(Dir_grid) - v_sel_t = W_grid * np.sin(Dir_grid) - - for element in element_forcing_index: - X0, X1, X2 = longitude_forcing_cells[element, :] - Y0, Y1, Y2 = latitude_forcing_cells[element, :] - - triangle = [(X0, Y0), (X1, Y1), (X2, Y2)] - - mask = create_triangle_mask(lon_grid, lat_grid, triangle) - - u_sel = u_sel_t[mask] - v_sel = v_sel_t[mask] - p_sel = p_grid[mask] - - p_mean = np.nanmean(p_sel) - u_mean = np.nanmean(u_sel) - v_mean = np.nanmean(v_sel) - - U_tes[element, i] = u_mean - V_tes[element, i] = v_mean - p_tes[element, i] = p_mean - - Dir_tes[element, i] = get_degrees_from_uv(-u_mean, -v_mean) - Wspeed_tes[element, i] = np.sqrt(u_mean**2 + v_mean**2) - - xds_vortex_interp = xr.Dataset( - data_vars={ - "Dir": (("element", "time"), Dir_tes), - "W": (("element", "time"), Wspeed_tes), - "p": (("element", "time"), p_tes), 
- }, - coords={ - "element": element_forcing_index, - "time": time, - }, - ) - return xds_vortex_interp - - -def create_triangle_mask( - lon_grid: np.ndarray, lat_grid: np.ndarray, triangle: np.ndarray -) -> np.ndarray: - """ - Create a mask for a triangle defined by its vertices. - - Parameters - ---------- - lon_grid : np.ndarray - The longitude grid. - lat_grid : np.ndarray - The latitude grid. - triangle : np.ndarray - The triangle vertices. - - Returns - ------- - np.ndarray - The mask for the triangle. - """ - - triangle_path = Path(triangle) - lon_grid, lat_grid = np.meshgrid(lon_grid, lat_grid) - points = np.vstack([lon_grid.flatten(), lat_grid.flatten()]).T - inside_mask = triangle_path.contains_points(points) - mask = inside_mask.reshape(lon_grid.shape) +from functools import lru_cache +import struct +import numpy as np +import xarray as xr +from tqdm import tqdm - return mask def GS_LinearWindDragCoef( @@ -2135,3 +199,132 @@ def GS_windsetup_reconstruction_nc( ds_wind_setup.attrs["description"] = "Wind setup from GreenSurge methodology" return ds_wind_setup + +@lru_cache(maxsize=256) +def read_raw_with_header(raw_path: str) -> np.ndarray: + """ + Read a .raw file with a 256-byte header and return a numpy float32 array. + + Parameters + ---------- + raw_path : str + Path to the .raw file. + + Returns + ------- + np.ndarray + The data array reshaped according to the header dimensions. 
+ """ + with open(raw_path, "rb") as f: + header_bytes = f.read(256) + dims = list(struct.unpack("4i", header_bytes[:16])) + dims = [d for d in dims if d > 0] + if len(dims) == 0: + raise ValueError(f"{raw_path}: invalid header, no dimension > 0 found") + data = np.fromfile(f, dtype=np.float32) + expected_size = np.prod(dims) + if data.size != expected_size: + raise ValueError( + f"{raw_path}: size mismatch (data={data.size}, expected={expected_size}, shape={dims})" + ) + return np.reshape(data, dims) + + +def greensurge_wind_setup_reconstruction_raw( + greensurge_dataset: str, + ds_GFD_info_update: xr.Dataset, + xds_vortex_interp: xr.Dataset, +) -> xr.Dataset: + """ + Compute the GreenSurge wind contribution and return an xarray Dataset with the results. + + Parameters + ---------- + greensurge_dataset : str + Path to the GreenSurge dataset directory. + ds_GFD_info_update : xr.Dataset + Updated GreenSurge information dataset. + xds_vortex_interp : xr.Dataset + Interpolated vortex dataset. + + Returns + ------- + xr.Dataset + Dataset containing the GreenSurge wind setup contribution. 
+ """ + + ds_gfd_metadata = ds_GFD_info_update + wind_direction_input = xds_vortex_interp + velocity_thresholds = np.array([0, 100, 100]) + drag_coefficients = np.array([0.00063, 0.00723, 0.00723]) + + direction_bins = ds_gfd_metadata.wind_directions.values + forcing_cell_indices = ds_gfd_metadata.element_forcing_index.values + wind_speed_reference = ds_gfd_metadata.wind_speed.values.item() + base_drag_coeff = GS_LinearWindDragCoef( + wind_speed_reference, drag_coefficients, velocity_thresholds + ) + time_step_hours = ds_gfd_metadata.time_step_hours.values + + time_start = wind_direction_input.time.values.min() + time_end = wind_direction_input.time.values.max() + duration_in_steps = ( + int((ds_gfd_metadata.simulation_duration_hours.values) / time_step_hours) + 1 + ) + output_time_vector = np.arange( + time_start, time_end, np.timedelta64(int(time_step_hours * 60), "m") + ) + num_output_times = len(output_time_vector) + + direction_data = wind_direction_input.Dir.values + wind_speed_data = wind_direction_input.W.values + + sample_path = ( + f"{greensurge_dataset}/GF_T_0_D_0/dflowfmoutput/GreenSurge_GFDcase_map.raw" + ) + sample_data = read_raw_with_header(sample_path) + n_faces = sample_data.shape[-1] + wind_setup_output = np.zeros((num_output_times, n_faces), dtype=np.float32) + water_level_accumulator = np.zeros(sample_data.shape, dtype=np.float32) + + for time_index in tqdm(range(num_output_times), desc="Processing time steps"): + water_level_accumulator[:] = 0 + for cell_index in forcing_cell_indices.astype(int): + current_dir = direction_data[cell_index, time_index] % 360 + adjusted_bins = np.where(direction_bins == 0, 360, direction_bins) + closest_direction_index = np.abs(adjusted_bins - current_dir).argmin() + + raw_path = f"{greensurge_dataset}/GF_T_{cell_index}_D_{closest_direction_index}/dflowfmoutput/GreenSurge_GFDcase_map.raw" + water_level_case = read_raw_with_header(raw_path) + + water_level_case = np.nan_to_num(water_level_case, nan=0) + + 
wind_speed_value = wind_speed_data[cell_index, time_index] + drag_coeff_value = GS_LinearWindDragCoef( + wind_speed_value, drag_coefficients, velocity_thresholds + ) + + scaling_factor = (wind_speed_value**2 / wind_speed_reference**2) * ( + drag_coeff_value / base_drag_coeff + ) + water_level_accumulator += water_level_case * scaling_factor + + step_window = min(duration_in_steps, num_output_times - time_index) + if (num_output_times - time_index) > step_window: + wind_setup_output[time_index : time_index + step_window] += ( + water_level_accumulator + ) + else: + shift_counter = step_window - (num_output_times - time_index) + wind_setup_output[ + time_index : time_index + step_window - shift_counter + ] += water_level_accumulator[: step_window - shift_counter] + + ds_wind_setup = xr.Dataset( + {"WL": (["time", "nface"], wind_setup_output)}, + coords={ + "time": output_time_vector, + "nface": np.arange(wind_setup_output.shape[1]), + }, + ) + return ds_wind_setup \ No newline at end of file diff --git a/bluemath_tk/additive/greenwaves.py b/bluemath_tk/additive/greenwaves.py new file mode 100644 index 0000000..62bc553 --- /dev/null +++ b/bluemath_tk/additive/greenwaves.py @@ -0,0 +1,186 @@ +from functools import lru_cache +import struct +import numpy as np +import xarray as xr +from tqdm import tqdm + +def greenwaves_wind_setup_reconstruction_raw( + greensurge_dataset: str, + ds_GFD_info_update: xr.Dataset, + xds_vortex_interp: xr.Dataset, +) -> xr.Dataset: + """ + Compute the GreenSurge wind contribution and return an xarray Dataset with the results. + + Parameters + ---------- + greensurge_dataset : str + Path to the GreenSurge dataset directory. + ds_GFD_info_update : xr.Dataset + Updated GreenSurge information dataset. + xds_vortex_interp : xr.Dataset + Interpolated vortex dataset. + + Returns + ------- + xr.Dataset + Dataset containing the GreenSurge wind setup contribution. 
+ """ + + ds_gfd_metadata = ds_GFD_info_update + wind_direction_input = xds_vortex_interp + velocity_thresholds = np.array([0, 100, 100]) + drag_coefficients = np.array([0.00063, 0.00723, 0.00723]) + + direction_bins = ds_gfd_metadata.wind_directions.values + forcing_cell_indices = ds_gfd_metadata.element_forcing_index.values + wind_speed_reference = ds_gfd_metadata.wind_speed.values.item() + base_drag_coeff = GS_LinearWindDragCoef( + wind_speed_reference, drag_coefficients, velocity_thresholds + ) + time_step_hours = ds_gfd_metadata.time_step_hours.values + + time_start = wind_direction_input.time.values.min() + time_end = wind_direction_input.time.values.max() + duration_in_steps = ( + int((ds_gfd_metadata.simulation_duration_hours.values) / time_step_hours) + 1 + ) + output_time_vector = np.arange( + time_start, time_end, np.timedelta64(int(time_step_hours * 60), "m") + ) + num_output_times = len(output_time_vector) + + direction_data = wind_direction_input.Dir.values + wind_speed_data = wind_direction_input.W.values + + sample_path = ( + f"{greensurge_dataset}/GF_T_0_D_0/output.raw" + ) + sample_data = read_raw_with_header(sample_path) + n_faces = sample_data.shape[-1] + wind_setup_output = np.zeros((num_output_times, n_faces), dtype=np.float32) + water_level_accumulator = np.zeros(sample_data.shape, dtype=np.float32) + + for time_index in tqdm(range(num_output_times), desc="Processing time steps"): + water_level_accumulator[:] = 0 + for cell_index in forcing_cell_indices.astype(int): + current_dir = direction_data[cell_index, time_index] % 360 + adjusted_bins = np.where(direction_bins == 0, 360, direction_bins) + closest_direction_index = np.abs(adjusted_bins - current_dir).argmin() + + raw_path = f"{greensurge_dataset}/GF_T_{cell_index}_D_{closest_direction_index}/output.raw" + water_level_case = read_raw_with_header(raw_path) + + water_level_case = np.nan_to_num(water_level_case, nan=0) + + wind_speed_value = wind_speed_data[cell_index, time_index] + 
drag_coeff_value = GS_LinearWindDragCoef( + wind_speed_value, drag_coefficients, velocity_thresholds + ) + + scaling_factor = (wind_speed_value**2 / wind_speed_reference**2) * ( + drag_coeff_value / base_drag_coeff + ) + water_level_accumulator += water_level_case * scaling_factor + + step_window = min(duration_in_steps, num_output_times - time_index) + if (num_output_times - time_index) > step_window: + wind_setup_output[time_index : time_index + step_window] += ( + water_level_accumulator + ) + else: + shift_counter = step_window - (num_output_times - time_index) + wind_setup_output[ + time_index : time_index + step_window - shift_counter + ] += water_level_accumulator[: step_window - shift_counter] + + ds_wind_setup = xr.Dataset( + {"WL": (["time", "nface"], wind_setup_output)}, + coords={ + "time": output_time_vector, + "nface": np.arange(wind_setup_output.shape[1]), + }, + ) + return ds_wind_setup + +@lru_cache(maxsize=256) +def read_raw_with_header(raw_path: str) -> np.ndarray: + """ + Read a .raw file with a 256-byte header and return a numpy float32 array. + + Parameters + ---------- + raw_path : str + Path to the .raw file. + + Returns + ------- + np.ndarray + The data array reshaped according to the header dimensions. + """ + with open(raw_path, "rb") as f: + header_bytes = f.read(256) + dims = list(struct.unpack("4i", header_bytes[:16])) + dims = [d for d in dims if d > 0] + if len(dims) == 0: + raise ValueError(f"{raw_path}: invalid header, no dimension > 0 found") + data = np.fromfile(f, dtype=np.float32) + expected_size = np.prod(dims) + if data.size != expected_size: + raise ValueError( + f"{raw_path}: size mismatch (data={data.size}, expected={expected_size}, shape={dims})" + ) + return np.reshape(data, dims) + +def GS_LinearWindDragCoef( + Wspeed: np.ndarray, CD_Wl_abc: np.ndarray, Wl_abc: np.ndarray +) -> np.ndarray: + """ + Calculate the linear drag coefficient based on wind speed and specified thresholds. 
+
+    Parameters
+    ----------
+    Wspeed : float
+        Wind speed value (scalar; the branching below does not support arrays).
+    CD_Wl_abc : np.ndarray
+        Coefficients for the drag coefficient calculation, should be a 1D array of length 3.
+    Wl_abc : np.ndarray
+        Wind speed thresholds for the drag coefficient calculation, should be a 1D array of length 3.
+
+    Returns
+    -------
+    float
+        Drag coefficient obtained by piecewise-linear interpolation at Wspeed.
+    """
+
+    Wla = Wl_abc[0]
+    Wlb = Wl_abc[1]
+    Wlc = Wl_abc[2]
+    CDa = CD_Wl_abc[0]
+    CDb = CD_Wl_abc[1]
+    CDc = CD_Wl_abc[2]
+
+    # Coefficients of the piecewise lines y = a*x + b between thresholds
+    if not Wla == Wlb:
+        a_CDline_ab = (CDa - CDb) / (Wla - Wlb)
+        b_CDline_ab = CDb - a_CDline_ab * Wlb
+    else:
+        a_CDline_ab = 0
+        b_CDline_ab = CDa
+    if not Wlb == Wlc:
+        a_CDline_bc = (CDb - CDc) / (Wlb - Wlc)
+        b_CDline_bc = CDc - a_CDline_bc * Wlc
+    else:
+        a_CDline_bc = 0
+        b_CDline_bc = CDb
+    a_CDline_cinf = 0
+    b_CDline_cinf = CDc
+
+    if Wspeed <= Wlb:
+        CD = a_CDline_ab * Wspeed + b_CDline_ab
+    elif Wspeed > Wlb and Wspeed <= Wlc:
+        CD = a_CDline_bc * Wspeed + b_CDline_bc
+    else:
+        CD = a_CDline_cinf * Wspeed + b_CDline_cinf
+
+    return CD
\ No newline at end of file