forked from NOAA-EMC/WW3-tools
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Adding polygon based mesh refinement, addressing issue NOAA-EMC#55
- Loading branch information
Showing
5 changed files
with
110 additions
and
62 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,84 +1,116 @@ | ||
""" Jigsaw meshes for WW3 with global bathymetry | ||
""" | ||
|
||
# Authors: Ali Salimi, Darren | ||
# Authors: Ali Salimi-Tarazouj, Darren Engwirda | ||
|
||
# This script will create mesh spacing based on user defined windows in json format. | ||
# This script will create mesh spacing based on user defined windows in json format or based on polygons in shapefile format | ||
|
||
import numpy as np | ||
import netCDF4 as nc | ||
import argparse | ||
import json | ||
import geopandas as gpd | ||
from shapely.geometry import Point | ||
|
||
def create_mask_file(data_filename, output_filename, windows=None, shapefiles=None, default_scale=20):
    """Build a WW3/jigsaw mesh-spacing NetCDF file from a global DEM.

    A default spacing field (km) is assigned from three latitude bands,
    then optionally refined inside rectangular lon/lat windows and/or
    inside polygons read from shapefiles.

    Parameters
    ----------
    data_filename : str
        Path to the DEM NetCDF file; must provide "lon", "lat",
        "bed_elevation" and "ice_thickness" variables.
    output_filename : str
        Path of the NetCDF file to write; it receives a single variable
        "val" of shape (nlat, nlon) holding the spacing in km.
    windows : list of dict, optional
        Rectangular refinement regions with keys "min_lon", "max_lon",
        "min_lat", "max_lat" and "hshr" (target spacing, km).
    shapefiles : list of (path, scale) tuples, optional
        Polygon refinement regions; cells inside the polygons get
        ``scale`` km spacing.
    default_scale : float, optional
        Retained for backward compatibility. Cells inside a shapefile's
        bounding box but outside its polygons now keep their
        latitude-band spacing instead of being reset to this value.
    """
    # Load the DEM; copy everything we need into NumPy arrays so the
    # dataset can be closed immediately.
    data = nc.Dataset(data_filename, "r")
    xlon = np.asarray(data["lon"][:])
    ylat = np.asarray(data["lat"][:])
    # Surface elevation = bedrock + ice thickness.
    elev = (np.asarray(data["bed_elevation"][:], dtype=np.float32)
            + np.asarray(data["ice_thickness"][:], dtype=np.float32))
    data.close()  # fix: the input dataset was never closed

    # Cell-centre coordinates (lon/lat arrays give cell edges).
    xmid = 0.5 * (xlon[:-1] + xlon[1:])
    ymid = 0.5 * (ylat[:-1] + ylat[1:])
    xmat, ymat = np.meshgrid(xmid, ymid)

    # Default spacing by latitude band (km).
    upper_bound = 50         # lat > upper_bound           -> scale_north
    middle_bound = -20       # middle_bound < lat <= upper -> scale_middle
    lower_bound = -90        # below middle_bound: linear ramp to lower_bound
    scale_north = 9          # km, northern band
    scale_middle = 20        # km, mid-latitude band
    scale_south_upper = 30   # km, ramp value at middle_bound
    scale_south_lower = 9    # km, ramp value at lower_bound

    scal = np.where(ymat > upper_bound, scale_north,
                    np.where(ymat > middle_bound, scale_middle,
                             scale_south_upper + (scale_south_lower - scale_south_upper) *
                             (ymat - middle_bound) / (lower_bound - middle_bound)))

    # Rectangular window refinement, applied only over shallow/coastal
    # cells (-4 m <= elev <= +8 m).
    # NOTE(review): assumes elev has the same shape as the cell-centre
    # grid ("pixel" registration) -- confirm against the DEM file.
    if windows:
        near_coast = np.logical_and(elev >= -4., elev <= +8.)
        for window in windows:
            in_lon = np.logical_and(xmat >= window["min_lon"], xmat <= window["max_lon"])
            in_lat = np.logical_and(ymat >= window["min_lat"], ymat <= window["max_lat"])
            in_window = np.logical_and(in_lon, in_lat)
            scal[np.logical_and(near_coast, in_window)] = window["hshr"]

    # Polygon (shapefile) refinement.
    if shapefiles:
        for shapefile, scale in shapefiles:
            gdf = gpd.read_file(shapefile)
            minx, miny, maxx, maxy = gdf.total_bounds
            # Restrict the point-in-polygon test to the polygons' bounding
            # box so only a subset of the global grid is tested.
            mask_x = (xmid >= minx) & (xmid <= maxx)
            mask_y = (ymid >= miny) & (ymid <= maxy)
            xmat_sub, ymat_sub = np.meshgrid(xmid[mask_x], ymid[mask_y])

            points_gdf_sub = gpd.GeoDataFrame(
                geometry=[Point(x, y) for x, y in zip(xmat_sub.ravel(), ymat_sub.ravel())],
                crs=gdf.crs)

            # fix: the 'op=' keyword was deprecated and removed from
            # geopandas.sjoin; 'predicate=' is the supported spelling.
            joined = gpd.sjoin(points_gdf_sub, gdf, how="left", predicate="intersects")
            # A point intersecting several polygons yields duplicate rows;
            # keep one row per point so the reshape below stays valid.
            joined = joined.loc[~joined.index.duplicated(keep="first")]

            # Unmatched points have NaN in 'index_right' after a left join.
            inside = joined["index_right"].notna().to_numpy().reshape(xmat_sub.shape)

            # fix: only overwrite cells actually inside a polygon.
            # Previously every bounding-box cell outside the polygons was
            # reset to default_scale, clobbering the latitude-band values
            # (e.g. 9 km Arctic cells became 20 km).
            sub = scal[np.ix_(mask_y, mask_x)]
            sub[inside] = scale
            scal[np.ix_(mask_y, mask_x)] = sub

    # Write the spacing field to a new NetCDF file.
    data_out = nc.Dataset(output_filename, "w")
    data_out.createDimension("nlon", xmid.size)
    data_out.createDimension("nlat", ymid.size)
    var = data_out.createVariable("val", "f4", ("nlat", "nlon"))
    var[:, :] = scal
    data_out.close()
|
||
def parse_input_args():
    """Parse command-line arguments for the mask generator.

    Both options are optional and may be combined.

    Returns
    -------
    tuple
        ``(windows, shapefiles)`` where ``windows`` is the decoded list
        of window dicts (or None) and ``shapefiles`` is a list of
        ``(path, scale)`` tuples (or None).
    """
    parser = argparse.ArgumentParser(description='Create a mask file with multiple methods.')
    # fix: restore the usage examples that were dropped from the help text.
    parser.add_argument('--windows', type=str,
                        help='JSON string with window definitions. Example: '
                             '\'[{"min_lon": -130, "max_lon": -64, "min_lat": 24.5, "max_lat": 47.5, "hshr": 5}]\'')
    parser.add_argument('--shapefiles', type=str,
                        help='JSON string with shapefile paths and scales. Example: '
                             '\'[{"path": "./Shapefiles/Gulf.shp", "scale": 5}]\'')
    args = parser.parse_args()

    windows = json.loads(args.windows) if args.windows else None
    shapefiles = json.loads(args.shapefiles) if args.shapefiles else None

    # Normalize shapefile entries to (path, scale) tuples for the caller.
    # fix: removed the leftover debugging print statement.
    if shapefiles:
        shapefiles = [(shp['path'], shp['scale']) for shp in shapefiles]

    return windows, shapefiles
|
||
if __name__ == "__main__":
    # Either --windows, --shapefiles, both, or neither may be supplied;
    # with neither, only the latitude-band defaults are written.
    windows, shapefiles = parse_input_args()
    # fix: removed the commented-out hardcoded shapefile fallback that was
    # left as a module-level triple-quoted string (dead code evaluated at
    # import time).
    create_mask_file("./RTopo_2_0_4_GEBCO_v2023_60sec_pixel.nc", "wmask.nc", windows, shapefiles)
|