better capturing of warnings
robertjwilson committed Oct 25, 2024
1 parent ad8cacb commit 98ad62c
Showing 1 changed file with 27 additions and 23 deletions.
ecoval/matchall.py — 27 additions, 23 deletions
@@ -1298,7 +1298,8 @@ def write_report(x):
     if global_grid is None:
         final_extension = extension_of_directory(sim_dir)
         path = glob.glob(sim_dir + final_extension + all_df.pattern[0])[0]
-        ds = nc.open_data(path, checks=False).to_xarray()
+        with warnings.catch_warnings(record=True) as w:
+            ds = nc.open_data(path, checks=False).to_xarray()
         lon_name = [x for x in ds.coords if "lon" in x]
         lat_name = [x for x in ds.coords if "lat" in x]
         lon = ds[lon_name[0]].values
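The pattern introduced here is the standard-library warnings.catch_warnings(record=True) context manager. A minimal, self-contained sketch of the idea, where open_dataset is only a stand-in for nc.open_data(path, checks=False).to_xarray() and not part of ecoval:

import warnings

def open_dataset(path):
    # Stand-in for the real dataset open, which can emit
    # coordinate/time warnings for some model files.
    warnings.warn("CDO found more than one time variable")
    return {"path": path}

# record=True captures warnings in the list `w` instead of printing them,
# so they can be filtered and reported later.
with warnings.catch_warnings(record=True) as w:
    warnings.simplefilter("always")  # make sure nothing is silently filtered out
    ds = open_dataset("example.nc")

for message in w:
    print(message.category.__name__, str(message.message))

The hunks below apply the same wrapper around the other dataset opens in this file.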
@@ -1370,20 +1371,21 @@ def write_report(x):
         for exc in exclude:
             ensemble = [x for x in ensemble if f"{exc}" not in os.path.basename(x)]
 
-        ds = nc.open_data(ensemble[0], checks = False)
-        if "e3t" in ds.variables:
-            print(
-                f"Extracting and saving thickness from {ensemble[0]} as matched/e3t.nc"
-            )
-            ds.subset(variable="e3t")
-            ds.subset(time=0)
-            ds.as_missing(0)
-            if os.path.exists("matched/e3t.nc"):
-                os.remove("matched/e3t.nc")
-            ds.to_nc("matched/e3t.nc", zip=True, overwrite=True)
-            thickness = "matched/e3t.nc"
-            thick_found = True
-            break
+        with warnings.catch_warnings(record=True) as w:
+            ds = nc.open_data(ensemble[0], checks = False)
+            if "e3t" in ds.variables:
+                print(
+                    f"Extracting and saving thickness from {ensemble[0]} as matched/e3t.nc"
+                )
+                ds.subset(variable="e3t")
+                ds.subset(time=0)
+                ds.as_missing(0)
+                if os.path.exists("matched/e3t.nc"):
+                    os.remove("matched/e3t.nc")
+                ds.to_nc("matched/e3t.nc", zip=True, overwrite=True)
+                thickness = "matched/e3t.nc"
+                thick_found = True
+                break
     if not thick_found:
         if thickness is None:
             print("It was not. Assuming files have z-levels for any vertical matchups.")
@@ -1457,12 +1459,15 @@ def write_report(x):
         pickle.dump(times_dict, f)
 
     print("********************************")
+    print("Extracting the geographic extent of the model output")
+    print("********************************")
 
     # figure out the lon/lat extent in the model
     if fvcom is False:
-        ds_extent = get_extent(ensemble[0])
-        lons = [ds_extent[0], ds_extent[1]]
-        lats = [ds_extent[2], ds_extent[3]]
+        with warnings.catch_warnings(record=True) as w:
+            ds_extent = get_extent(ensemble[0])
+            lons = [ds_extent[0], ds_extent[1]]
+            lats = [ds_extent[2], ds_extent[3]]
     else:
         drop_variables = ["siglay", "siglev"]
         ds= xr.open_dataset( ff, drop_variables=drop_variables, decode_times=False)
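get_extent itself is not shown in this commit. Purely as an illustration of the kind of call being wrapped here, a hypothetical xarray-based version could look like the sketch below; the coordinate-name matching mirrors the lon/lat lookup in the first hunk.

import warnings

import xarray as xr  # xr is already used in the fvcom branch above


def dataset_extent(path):
    # Hypothetical stand-in for get_extent: return (lon_min, lon_max, lat_min, lat_max)
    # while recording, rather than printing, any warnings the open raises.
    with warnings.catch_warnings(record=True) as w:
        ds = xr.open_dataset(path, decode_times=False)
        lon_name = [x for x in ds.coords if "lon" in x][0]
        lat_name = [x for x in ds.coords if "lat" in x][0]
        lons = ds[lon_name].values
        lats = ds[lat_name].values
    return float(lons.min()), float(lons.max()), float(lats.min()), float(lats.max())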
@@ -1529,7 +1534,6 @@ def write_report(x):
     point_vars.sort()
 
     for vv in point_vars:
-        print(vv)
         all_df = df_mapping
         all_df = all_df.query("model_variable in @good_model_vars").reset_index(
             drop=True
@@ -2102,6 +2106,10 @@ def point_match(variable, layer="all", ds_depths=None, df_times = None):
             if ww is not None:
                 if ww in output_warnings:
                     continue
+                if "CDO found more than one time variable" in ww:
+                    continue
+                if "coordinates variable time" in ww:
+                    continue
                 output_warnings.append(str(ww))
 
     if len(output_warnings) > 0:
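The new checks skip warnings that are duplicates or that match known-noisy messages before anything reaches output_warnings. The same logic, pulled out as a small helper for clarity — a sketch, not code from the repository:

# Substrings taken from the diff above; anything containing them is treated as noise.
BENIGN_SNIPPETS = (
    "CDO found more than one time variable",
    "coordinates variable time",
)


def filter_warnings(captured, output_warnings):
    # Append each new, non-benign warning message to output_warnings.
    for ww in captured:
        if ww is None:
            continue
        message = str(ww)
        if message in output_warnings:
            continue  # already recorded
        if any(snippet in message for snippet in BENIGN_SNIPPETS):
            continue  # known-harmless CDO/time-coordinate chatter
        output_warnings.append(message)
    return output_warnings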
@@ -2113,10 +2121,6 @@ def point_match(variable, layer="all", ds_depths=None, df_times = None):
     while len(session_warnings) > 0:
         session_warnings.pop()
 
-    # print the time dictionary
-    #print("********************************")
-    #print(times_dict)
-
    gridded_matchup(
        df_mapping=df_mapping,
        folder=sim_dir,
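One reason to drain session_warnings with pop() in a loop rather than rebinding it to a new list: clearing in place keeps the same list object, so any other code holding a reference sees the reset too. A tiny sketch of the difference:

session_warnings = ["warning a", "warning b"]
alias = session_warnings  # e.g. a reference held by another module or closure

while len(session_warnings) > 0:
    session_warnings.pop()

assert alias == []  # the shared object was emptied in place

# Rebinding instead (session_warnings = []) would leave `alias` still holding
# the two old messages, because it points at the original list object.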
