updating regen_expected_results.py to regenerate fabm0d and gotm expected results
wathen committed Jul 24, 2024
1 parent 8eee5e7 commit cd97ff6
Showing 1 changed file with 32 additions and 25 deletions.
@@ -1,9 +1,3 @@
-"""
-Script that regenerates expected results. You will need to install GOTM on your machine
-first and use those results to regenerate the expected values
-"""
-
-
 import argparse
 import json
 import netCDF4 as nc
@@ -18,27 +12,39 @@ def default(self, obj):
 
 parser = argparse.ArgumentParser()
 parser.add_argument('-p', '--data-path', type=str, required=True,
-                    help='Path to output file from GOTM run')
+                    help='Path to output file from model run')
+parser.add_argument('-m', '--model-run', type=str, required=True,
+                    help='Type of model run that has been run',
+                    choices=["gotm", "fabm0d"])
 args, _ = parser.parse_known_args()
 data_path = args.data_path
+model_run = args.model_run
 
-state_vars = \
-["N1_p" , "N3_n" , "N4_n" , "N5_s" , "O2_o" , "O3_c" , "O3_bioalk" , "R1_c" , "R1_n" ,
-"R1_p" , "R2_c" , "R3_c" , "R4_c" , "R4_n" , "R4_p" , "R6_c" , "R6_n" , "R6_p" ,
-"R6_s" , "R8_c" , "R8_n" , "R8_p" , "R8_s" , "B1_c" , "B1_n" , "B1_p" , "P1_c" ,
-"P1_n" , "P1_p" , "P1_Chl" , "P1_s" , "P2_c" , "P2_n" , "P2_p" , "P2_Chl" , "P3_c" ,
-"P3_n" , "P3_p" , "P3_Chl" , "P4_c" , "P4_n" , "P4_p" , "P4_Chl" , "Z4_c" , "Z5_c" ,
-"Z5_n" , "Z5_p" , "Z6_c" , "Z6_n" , "Z6_p" , "L2_c" , "Q1_c" , "Q1_p" , "Q1_n" ,
-"Q6_c" , "Q6_p" , "Q6_n" , "Q6_s" , "Q6_pen_depth_c" , "Q6_pen_depth_n" ,
-"Q6_pen_depth_p" , "Q6_pen_depth_s" , "Q7_c" , "Q7_p" , "Q7_n" , "Q7_pen_depth_c" ,
-"Q7_pen_depth_n" , "Q7_pen_depth_p" , "Q17_c" , "Q17_p" , "Q17_n" , "bL2_c" ,
-"ben_col_D1m" , "ben_col_D2m" , "K1_p" , "K3_n" , "K4_n" , "K5_s" , "G2_o" ,
-"G2_o_deep" , "G3_c" , "ben_nit_G4n" , "H1_c" , "H2_c" , "Y2_c" , "Y3_c" ,
-"Y4_c"]
+dir_mapping = {"fabm0d": "fabm0d-gotm-ersem", "gotm": "gotm-fabm-ersem"}
+if model_run == "gotm":
+    state_vars = [
+        "N1_p", "N3_n", "N4_n", "N5_s", "O2_o", "O3_c", "O3_bioalk", "R1_c", "R1_n",
+        "R1_p", "R2_c", "R3_c", "R4_c", "R4_n", "R4_p", "R6_c", "R6_n", "R6_p",
+        "R6_s", "R8_c", "R8_n", "R8_p", "R8_s", "B1_c", "B1_n", "B1_p", "P1_c",
+        "P1_n", "P1_p", "P1_Chl", "P1_s", "P2_c", "P2_n", "P2_p", "P2_Chl", "P3_c",
+        "P3_n", "P3_p", "P3_Chl", "P4_c", "P4_n", "P4_p", "P4_Chl", "Z4_c", "Z5_c",
+        "Z5_n", "Z5_p", "Z6_c", "Z6_n", "Z6_p", "L2_c", "Q1_c", "Q1_p", "Q1_n",
+        "Q6_c", "Q6_p", "Q6_n", "Q6_s", "Q6_pen_depth_c", "Q6_pen_depth_n",
+        "Q6_pen_depth_p", "Q6_pen_depth_s", "Q7_c", "Q7_p", "Q7_n", "Q7_pen_depth_c",
+        "Q7_pen_depth_n", "Q7_pen_depth_p", "Q17_c", "Q17_p", "Q17_n", "bL2_c",
+        "ben_col_D1m", "ben_col_D2m", "K1_p", "K3_n", "K4_n", "K5_s", "G2_o",
+        "G2_o_deep", "G3_c", "ben_nit_G4n", "H1_c", "H2_c", "Y2_c", "Y3_c",
+        "Y4_c"
+    ]
+    vars_of_interest = ["dates", "N1_p", "N3_n", "N5_s"]
+    data_dict = {"expected": vars_of_interest, "expected_state": state_vars}
 
-gotm_vars_test = ["dates", "N1_p", "N3_n", "N5_s"]
+elif model_run == "fabm0d":
+    vars_of_interest = [
+        "dates", "light_parEIR", "temp", "salt", "N1_p", "N3_n", "B1_c", "P2_c"
+    ]
 
-data_dict = {"expected": gotm_vars_test, "expected_state": state_vars}
+    data_dict = {"expected": vars_of_interest}
 
 for key, items in data_dict.items():
     data = nc.Dataset(data_path, 'r')
@@ -53,6 +59,8 @@ def default(self, obj):
                                     calendar=times.calendar)
                 dates = [str(d).split(" ")[0] for d in dates]
                 expected_results[v] = dates
+            elif v == "time":
+                print(data.variables[v])
             else:
                 depth = 0.0
                 var = data.variables[v]
@@ -71,8 +79,7 @@
                 else:
                     raise RuntimeError
         except Exception as e:
-            print(f"Did not update {v} since it was not in the output file")
+            print(f"Did not update {v} since {e}")
 
-    with open(f'{key}.json', 'w') as f:
+    with open(f'{dir_mapping[model_run]}/{key}.json', 'w') as f:
         json.dump(expected_results, f, cls=NumpyEncoder)
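
Note: the NumpyEncoder passed to json.dump via cls= is defined in an unchanged part of the file; only its def default(self, obj) signature appears in the hunk headers above. As a hypothetical sketch of what such an encoder typically looks like, assuming it only needs to turn NumPy arrays and scalars into JSON-serialisable Python types:

    import json
    import numpy as np

    class NumpyEncoder(json.JSONEncoder):
        # Sketch only, not the repository's actual implementation.
        def default(self, obj):
            if isinstance(obj, np.ndarray):
                return obj.tolist()    # arrays -> nested lists
            if isinstance(obj, np.generic):
                return obj.item()      # NumPy scalars -> plain Python scalars
            return super().default(obj)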

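Given the argparse options and the dir_mapping added above, the updated script would presumably be run once per configuration, pointing -p at the NetCDF output of a local model run (the filenames below are placeholders):

    python regen_expected_results.py --data-path gotm_output.nc --model-run gotm
    python regen_expected_results.py --data-path fabm0d_output.nc --model-run fabm0d

For the gotm case this regenerates expected.json and expected_state.json under gotm-fabm-ersem/; for fabm0d it regenerates only expected.json under fabm0d-gotm-ersem/.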