Make setup rundir more robust #177

Closed
wants to merge 7 commits into from
56 changes: 43 additions & 13 deletions regional_mom6/regional_mom6.py
@@ -198,9 +198,9 @@ def hyperbolictan_thickness_profile(nlayers, ratio, total_depth):
54.91569316, 63.76993631, 70.3615673 , 74.6060059 , 77.08936249,
78.46160957, 79.19600916, 79.58232451, 79.7836947 , 79.88816229])
>>> dz.sum()
1000.0
np.float64(1000.0)
>>> dz[-1] / dz[0]
3.9721960481753706
np.float64(3.9721960481753706)
Contributor: what is this np.float64 thing?

Contributor: it's elsewhere also....

Collaborator: That was required as part of the numpy 2.0.0 compatibility to make the doctest pass: it prints out the types of return values. I think that is useful, but given we're not going to use numpy 2.0.0 for a little bit, it'll disappear for now anyway!
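
For reference, a minimal sketch (not part of this diff) of the NumPy scalar-repr change behind these doctest updates, assuming NumPy >= 2.0 is installed:

import numpy as np

dz = np.array([20.0, 30.0, 50.0])

# Under NumPy >= 2.0 the repr of scalar results includes the type,
# which is what the updated doctest outputs have to match.
print(repr(dz.sum()))        # NumPy 2.x: np.float64(100.0)  (NumPy 1.x: 100.0)
print(repr(dz[-1] / dz[0]))  # NumPy 2.x: np.float64(2.5)    (NumPy 1.x: 2.5)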


If we want the top layer to be thicker then we need to prescribe ``ratio < 1``.

@@ -214,9 +214,9 @@ def hyperbolictan_thickness_profile(nlayers, ratio, total_depth):
45.08430684, 36.23006369, 29.6384327 , 25.3939941 , 22.91063751,
21.53839043, 20.80399084, 20.41767549, 20.2163053 , 20.11183771])
>>> dz.sum()
1000.0
np.float64(1000.0)
>>> dz[-1] / dz[0]
0.25174991059652
np.float64(0.25174991059652)

Now how about a grid with the same total depth as above but with equally-spaced
layers.
@@ -437,6 +437,7 @@ def __init__(
self.grid_type = grid_type
self.repeat_year_forcing = repeat_year_forcing
self.ocean_mask = None
self.layout = None # This should be a tuple. Leaving in a dummy 'None' makes it easy to remind the user to provide a value later on.
navidcy marked this conversation as resolved.
if read_existing_grids:
try:
self.hgrid = xr.open_dataset(self.mom_input_dir / "hgrid.nc")
@@ -1425,6 +1426,21 @@ def setup_run_directory(
premade_rundir_path = Path(
importlib.resources.files("regional_mom6") / "demos/premade_run_directories"
)
if not premade_rundir_path.exists():
print("Could not find premade run directories at ", premade_rundir_path)
print(
"Perhaps the package was imported directly rather than installed with conda. Checking if this is the case... "
)

premade_rundir_path = Path(
importlib.resources.files("regional_mom6").parent
/ "demos/premade_run_directories"
)
if not premade_rundir_path.exists():
raise ValueError(
f"Cannot find the premade run directory files at {premade_rundir_path} either.\n\n"
+ "There may be an issue with package installation. Check that the `premade_run_directory` folder is present in one of these two locations"
)

# Define the locations of the directories we'll copy files across from. Base contains most of the files, and overwrite replaces files in the base directory.
base_run_dir = premade_rundir_path / "common_files"
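
As an aside, here is a standalone sketch of the lookup-with-fallback pattern added above (illustrative only: the helper name find_premade_rundir_path is hypothetical; the demos/premade_run_directories layout is taken from the diff):

import importlib.resources
from pathlib import Path

def find_premade_rundir_path(package="regional_mom6"):
    # Try the installed package first, then a repository checkout one level up.
    pkg_root = Path(importlib.resources.files(package))
    candidates = [
        pkg_root / "demos/premade_run_directories",
        pkg_root.parent / "demos/premade_run_directories",
    ]
    for candidate in candidates:
        if candidate.exists():
            return candidate
    raise ValueError(
        f"Cannot find the premade run directory files in any of {candidates}."
    )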
@@ -1490,18 +1506,32 @@ def setup_run_directory(
mask_table = p.name
x, y = (int(v) for v in layout.split("x"))
ncpus = (x * y) - int(masked)
if mask_table == None:
layout = (
x,
y,
) # This is a local variable keeping track of the layout as read from the mask table. Not to be confused with self.layout which is unchanged and may differ.

print(
"No mask table found! This suggests the domain is mostly water, so there are "
+ "no `non compute` cells that are entirely land. If this doesn't seem right, "
+ "ensure you've already run `FRE_tools`."
f"Mask table {p.name} read. Using this to infer the cpu layout {layout}, total masked out cells {masked}, and total number of CPUs {ncpus}."
)
if not hasattr(self, "layout"):

if mask_table == None:
if self.layout == None:
raise AttributeError(
"No layout information found. This suggests that `FRE_tools()` hasn't been called yet. "
+ "Please do so, in order for the number of processors required is computed."
"No mask table found, and the cpu layout has not been set. At least one of these is requiret to set up the experiment."
)
ncpus = self.layout[0] * self.layout[1]
print(
f"No mask table found, but the cpu layout has been set to {self.layout} This suggests the domain is mostly water, so there are "
+ "no `non compute` cells that are entirely land. If this doesn't seem right, "
+ "ensure you've already run the `FRE_tools` method which sets up the cpu mask table. Keep an eye on any errors that might print while"
+ "the FRE tools (which run C++ in the background) are running."
)
# Here we define a local copy of the layout just for use within this function.
# This prevents the layout from being overwritten in the main class in case
# the user accidentally loads in the wrong mask table.
layout = self.layout
ncpus = layout[0] * layout[1]

print("Number of CPUs required: ", ncpus)

## Modify the input namelists to give the correct layouts
@@ -1515,7 +1545,7 @@ setup_run_directory(
else:
lines[jj] = "# MASKTABLE = no mask table"
if "LAYOUT =" in lines[jj] and "IO" not in lines[jj]:
lines[jj] = f"LAYOUT = {self.layout[1]},{self.layout[0]}\n"
lines[jj] = f"LAYOUT = {layout[1]},{layout[0]}\n"

if "NIGLOBAL" in lines[jj]:
lines[jj] = f"NIGLOBAL = {self.hgrid.nx.shape[0]//2}\n"
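To illustrate the layout logic in the setup_run_directory changes above, here is a minimal sketch (illustrative only, not the PR's code) of inferring the CPU layout from a mask table, with a fallback to a user-supplied layout; it assumes the FMS naming convention mask_table.<n_masked>.<nx>x<ny>:

import os

def infer_layout(rundir, fallback_layout=None):
    # Look for a mask table produced by FRE_tools, e.g. "mask_table.4.10x10".
    mask_tables = [f for f in os.listdir(rundir) if f.startswith("mask_table.")]
    if mask_tables:
        _, masked, layout_str = mask_tables[0].split(".")
        nx, ny = (int(v) for v in layout_str.split("x"))
        # Masked blocks are land-only and are not assigned a CPU.
        return (nx, ny), nx * ny - int(masked)
    if fallback_layout is None:
        raise ValueError(
            "No mask table found and no layout set; run FRE_tools or provide a layout."
        )
    nx, ny = fallback_layout
    return (nx, ny), nx * ny

The PR itself goes further by keeping self.layout untouched and working with a local copy, as noted in its inline comment.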
14 changes: 7 additions & 7 deletions regional_mom6/utils.py
@@ -22,10 +22,10 @@ def angle_between(v1, v2, v3):
>>> v2 = (1, 0, 0)
>>> v3 = (0, 1, 0)
>>> angle_between(v1, v2, v3)
1.5707963267948966
np.float64(1.5707963267948966)
>>> from numpy import rad2deg
>>> rad2deg(angle_between(v1, v2, v3))
90.0
np.float64(90.0)
"""

v1xv2 = np.cross(v1, v2)
@@ -58,10 +58,10 @@ def quadrilateral_area(v1, v2, v3, v4):
>>> v3 = latlon_to_cartesian(90, 0, R)
>>> v4 = latlon_to_cartesian(0, -90, R)
>>> quadrilateral_area(v1, v2, v3, v4)
592556.1793298927
np.float64(592556.1793298927)
>>> from numpy import pi
>>> quadrilateral_area(v1, v2, v3, v4) == pi * R**2
True
np.True_
"""

v1 = np.array(v1)
@@ -106,14 +106,14 @@ def latlon_to_cartesian(lat, lon, R=1):

>>> from regional_mom6.utils import latlon_to_cartesian
>>> latlon_to_cartesian(0, 0)
(1.0, 0.0, 0.0)
(np.float64(1.0), np.float64(0.0), np.float64(0.0))

Now let's do the same on a sphere with Earth's radius

>>> from regional_mom6.utils import latlon_to_cartesian
>>> R = 6371e3
>>> latlon_to_cartesian(0, 0, R)
(6371000.0, 0.0, 0.0)
(np.float64(6371000.0), np.float64(0.0), np.float64(0.0))
"""

x = R * np.cos(np.deg2rad(lat)) * np.cos(np.deg2rad(lon))
@@ -169,7 +169,7 @@ def quadrilateral_areas(lat, lon, R=1):
[1.96911611e+13, 1.96911611e+13, 1.96911611e+13, 1.96911611e+13,
1.96911611e+13, 1.96911611e+13]])
>>> np.isclose(areas.sum(), 4 * np.pi * R**2, atol=np.finfo(areas.dtype).eps)
True
np.True_
"""

coords = np.dstack(latlon_to_cartesian(lat, lon, R))