Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Workflow offshore #17

Merged
merged 2 commits into from
Nov 26, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,2 +1,10 @@
cache/
tmp/

__pycache__/
*.py[cod]

.vscode/

DT_flood/workflows/deltares_sfincs-cpu:sfincs-v2.0.3-Cauberg.sfincs
DT_flood/workflows/deltares_wflow:v0.7.3.sif
Binary file modified DT_flood/utils/__pycache__/fa_scenario_utils.cpython-311.pyc
Binary file not shown.
Binary file modified DT_flood/utils/__pycache__/workflow_utils.cpython-311.pyc
Binary file not shown.
38 changes: 23 additions & 15 deletions DT_flood/utils/fa_scenario_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,10 +12,10 @@
from flood_adapt.api import measures
from flood_adapt.api import strategies
from flood_adapt.api import scenarios
from flood_adapt.config import Settings
from flood_adapt.object_model.interface.database import IDatabase
from flood_adapt.object_model.interface.events import IEvent
from flood_adapt.object_model.interface.projections import IProjection
# from flood_adapt.object_model.interface.measures import IMeasure
from flood_adapt.object_model.interface.strategies import IStrategy
from flood_adapt.object_model.interface.scenarios import IScenario

Expand Down Expand Up @@ -45,6 +45,12 @@ def init_scenario(database_path: Union[str, os.PathLike], scenario_config_name:

scenario_path = database_path / scenario_config_name

Settings(
database_root = database_path.parent,
database_name = database_path.stem,
system_folder = database_path/"system",
delete_crashed_runs = False,
)
db = static.read_database(database_path=database_path.parent, site_name=database_path.stem)
with open(scenario_path, 'rb') as f:
scenario = tomli.load(f)
Expand Down Expand Up @@ -147,15 +153,14 @@ def create_event_config(database: IDatabase, scenario_config: dict) -> IEvent:
"""

# Set event type
# if not database.site.attrs.sfincs.offshore_model:
# event_type = "Historical_nearshore"
# else:
# # event_type = "offshore"
# raise NotImplementedError("Offshore models not (yet) supported here")
event_type = "Historical_nearshore"
if not database.site.attrs.sfincs.offshore_model:
event_type = "Historical_nearshore"
else:
event_type = "Historical_offshore"

# Set meteo forcing type
dc = DataCatalog(scenario_config["event"]["data_catalogues"])
if scenario_config['event']['data_catalogues']:
dc = DataCatalog(scenario_config["event"]["data_catalogues"])
if 'meteo' in scenario_config["event"]["sfincs_forcing"].keys():
wind_type = 'none'
rainfall_type = 'none'
Expand All @@ -178,16 +183,19 @@ def create_event_config(database: IDatabase, scenario_config: dict) -> IEvent:


# Set waterlevel forcing type
tide_type = 'timeseries'
tide_dict = {'source': tide_type}
if (
if event_type == "Historical_nearshore":
tide_type = "timeseries"
if (
scenario_config["event"]["sfincs_forcing"]["waterlevel"] in dc.sources or
Path(scenario_config["event"]["sfincs_forcing"]["waterlevel"]).suffix == '.nc'
):
placeholder_path = database.input_path/'events'/scenario_config["event"]["name"]/'placeholder.csv'
tide_dict['timeseries_file'] = str(placeholder_path.name)
else:
tide_dict["timeseries_file"] = scenario_config["event"]["sfincs_forcing"]["waterlevel"]
placeholder_path = database.input_path/'events'/scenario_config["event"]["name"]/'placeholder.csv'
tide_dict['timeseries_file'] = str(placeholder_path.name)
else:
tide_dict["timeseries_file"] = scenario_config["event"]["sfincs_forcing"]["waterlevel"]
elif event_type =="Historical_offshore":
tide_type = "model"
tide_dict = {'source': tide_type}

event_dict = {
'name': scenario_config["event"]["name"],
Expand Down
120 changes: 120 additions & 0 deletions DT_flood/utils/sfincs_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,120 @@
import pandas as pd

from cht.misc.deltares_ini import IniStruct
from cht.sfincs.sfincs import FlowBoundaryPoint
from cht.tide.tide_predict import predict

from earthkit.data import from_source
from earthkit.regrid import interpolate


def read_flow_boundary_points(bnd_fn):
    """Read a SFINCS .bnd file and return its flow boundary points.

    Parameters
    ----------
    bnd_fn : str or os.PathLike
        Path to a whitespace-delimited SFINCS bnd file with one "x y"
        coordinate pair per line (no header).

    Returns
    -------
    list of FlowBoundaryPoint
        One point per row, named "0001", "0002", ... in file order
        (SFINCS convention: 1-based, zero-padded names).
    """
    # sep=r"\s+" replaces delim_whitespace=True, which is deprecated
    # since pandas 2.1 and removed in pandas 3.
    df = pd.read_csv(bnd_fn, index_col=False, header=None,
                     sep=r"\s+", names=['x', 'y'])

    flow_boundary_point = []
    for ind, (x, y) in enumerate(zip(df.x.values, df.y.values)):
        name = str(ind + 1).zfill(4)
        flow_boundary_point.append(FlowBoundaryPoint(x, y, name=name))
    return flow_boundary_point


def read_astro_boundary_conditions(flow_boundary_point, bca_fn):
    """Attach astronomic tide components from a SFINCS .bca file.

    Parameters
    ----------
    flow_boundary_point : list of FlowBoundaryPoint
        Boundary points to annotate; mutated in place.
    bca_fn : str or os.PathLike
        Path to the SFINCS bca file.

    Returns
    -------
    list of FlowBoundaryPoint
        The same list, with each point's ``astro`` attribute set to the
        data of the bca section at the matching index.
    """
    bca = IniStruct(filename=bca_fn)
    for idx, bnd_point in enumerate(flow_boundary_point):
        # Sections are assumed to appear in the same order as the points.
        bnd_point.astro = bca.section[idx].data
    return flow_boundary_point

def generate_bzs_from_bca(flow_boundary_point,
                          tref,
                          tstart,
                          tstop,
                          bzs_fn=None,
                          dt: int = 600,
                          offset: float = 0.0,
                          write_file: bool = True):
    """Predict tidal water levels per boundary point and write a SFINCS bzs file.

    Parameters
    ----------
    flow_boundary_point : list of FlowBoundaryPoint
        Points with an ``astro`` attribute (see read_astro_boundary_conditions);
        each point's ``data`` attribute is set to its predicted time series.
    tref, tstart, tstop : datetime-like
        Reference time and simulation window.
    bzs_fn : str or os.PathLike, optional
        Output file path; defaults to "sfincs.bzs".
    dt : int, optional
        Time step in seconds (default 600).
    offset : float, optional
        Constant water-level offset added to every prediction.
    write_file : bool, optional
        If True (default), write the bzs file to disk.

    Returns
    -------
    pandas.DataFrame
        Water levels indexed by seconds since ``tref``, one column per point.
    """
    if bzs_fn is None:
        bzs_fn = "sfincs.bzs"

    # Lowercase "s" is the non-deprecated seconds alias (uppercase "S" is
    # deprecated since pandas 2.2).
    try:
        times = pd.date_range(start=tstart, end=tstop, freq=f"{dt}s")
    except (ValueError, pd.errors.OutOfBoundsDatetime):
        # Dates outside the representable pandas Timestamp range (e.g.
        # far-future scenario years): shift the whole window to year 2000.
        print("Dates fall outside pandas date_range, year 2000 used instead")
        tref = tref.replace(year=2000)
        tstart = tstart.replace(year=2000)
        tstop = tstop.replace(year=2000)
        times = pd.date_range(start=tstart, end=tstop, freq=f"{dt}s")

    # One column of predicted water levels per boundary point.
    df = pd.DataFrame()
    for point in flow_boundary_point:
        point.data = pd.Series(predict(point.astro, times) + offset, index=times)
        df = pd.concat([df, point.data], axis=1)

    # SFINCS expects the time axis as seconds relative to tref.
    tmsec = pd.to_timedelta(df.index.values - tref, unit="s")
    df.index = tmsec.total_seconds()

    if write_file:
        df.to_csv(bzs_fn,
                  index=True,
                  sep=" ",
                  header=False,
                  float_format="%0.3f")
    return df

def process_dt_climate(filepath, tstart, tend, bounds, res=0.1):
    """Load DT climate (GRIB) data, regrid it, and clip it for SFINCS meteo forcing.

    Parameters
    ----------
    filepath : str or os.PathLike
        Path to the source file readable by earthkit ``from_source``.
    tstart, tend : datetime-like
        Start/end of the period; compared against the GRIB ``dataDate`` key
        as YYYYMMDD integers.
    bounds : sequence of float
        Bounding box in order [minx, miny, maxx, maxy] (degrees, EPSG:4326).
    res : float, optional
        Output grid resolution in degrees (default 0.1).

    Returns
    -------
    xarray.Dataset
        Dataset with coords ``x``/``y`` and variables ``press_msl``,
        ``wind10_u``, ``wind10_v``, clipped to ``bounds``.
    """
    start = int(tstart.strftime("%Y%m%d"))
    end = int(tend.strftime("%Y%m%d"))

    print("Loading data")
    data = from_source("file", filepath).sel(dataDate=slice(start, end))
    print(f"Interpolating data to grid with resolution {res} deg")
    data = interpolate(data, out_grid={"grid": [res, res]}, method='linear')
    ds = data.to_xarray(xarray_open_dataset_kwargs={'chunks': {"time": 1}}).squeeze()

    # Wrap longitudes from [0, 360) to [-180, 180).
    ds = ds.assign_coords(
        {"longitude": ((ds.longitude + 180) % 360) - 180}
    )
    # sortby returns a new dataset — the results must be reassigned
    # (the previous version discarded them, leaving coords unsorted).
    ds = ds.sortby("longitude")
    ds = ds.sortby("latitude")
    ds = ds.rename(
        {
            "longitude": "x",
            "latitude": "y",
            "sp": "press_msl",
            "u10": "wind10_u",
            "v10": "wind10_v"
        }
    )
    # Requires the hydromt raster accessor to be registered on xarray.
    ds.raster.set_crs(4326)

    ds = ds.sel(
        x=slice(bounds[0], bounds[2]),
        y=slice(bounds[1], bounds[3])
    )

    return ds
9 changes: 6 additions & 3 deletions DT_flood/workflows/cwl/fa_sfincs_workflow_docker.cwl
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ inputs:
scenario: string
sfincs_update_script: File
arrange_script: File
mode: string

outputs:
fa_database_out:
Expand All @@ -32,6 +33,7 @@ steps:
in:
fa_database: setup_sfincs/fa_database_out
scenario: scenario
mode: mode
out:
[sfincs_dir]
run:
Expand All @@ -47,11 +49,13 @@ steps:
type: Directory
scenario:
type: string
mode:
type: string
outputs:
sfincs_dir:
type: Directory
outputBinding:
glob: $(inputs.fa_database.basename+"/output/Scenarios/"+inputs.scenario+"/Flooding/simulations/overland")
glob: $(inputs.fa_database.basename+"/output/scenarios/"+inputs.scenario+"/Flooding/simulations/"+inputs.mode)
run_sfincs:
in:
sfincs_files:
Expand All @@ -64,8 +68,7 @@ steps:
fetch_sfincs_files:
in:
files: run_sfincs/sfincs_files_out
dir_name:
default: "overland"
dir_name: mode
out:
[dir]
run:
Expand Down
33 changes: 33 additions & 0 deletions DT_flood/workflows/cwl/postprocess_fiat.cwl
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# CWL CommandLineTool: run a Python script (pyscript) against a staged copy
# of a FloodAdapt database directory for a given scenario, and return the
# (possibly modified) database directory as output.
cwlVersion: v1.2
class: CommandLineTool

# Invoked as: python <pyscript> <fa_database> <scenario>
baseCommand: ["python"]

requirements:
  InlineJavascriptRequirement: {}
  # Stage the script and the database into the working directory so the
  # script can modify the database in place.
  InitialWorkDirRequirement:
    listing:
      - $(inputs.pyscript)
      - $(inputs.fa_database)

inputs:
  pyscript:
    type: File
    inputBinding:
      position: 1
  fa_database:
    type: Directory
    inputBinding:
      position: 2
  scenario:
    type: string
    inputBinding:
      position: 3



outputs:
  # Capture the whole database directory (same basename) after the run.
  fa_database_out:
    type: Directory
    outputBinding:
      glob: "$(inputs.fa_database.basename)"
13 changes: 13 additions & 0 deletions DT_flood/workflows/cwl/ra2ce_workflow_config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Input bindings for the RA2CE workflow: scripts to run, the FloodAdapt
# database to operate on, and the scenario name.
update_script:
  class: File
  path: "../pyscripts/update_ra2ce.py"
run_script:
  class: File
  path: "../pyscripts/run_ra2ce.py"
# NOTE(review): machine-specific absolute path — adjust per deployment.
fa_database:
  class: Directory
  path: "/home/wotromp/InterTwin/FloodAdapt_database/Humber"
scenario: "Empty_Event_current_no_measures"
utils_script:
  class: File
  path: "../../utils/ra2ce_utils_docker.py"
7 changes: 7 additions & 0 deletions DT_flood/workflows/cwl/run_ra2ce_args.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
# Arguments for running the RA2CE step standalone: the script to execute,
# the FloodAdapt database directory, and the scenario name.
pyscript:
  class: File
  path: "../pyscripts/run_ra2ce.py"
# NOTE(review): machine-specific absolute path — adjust per deployment.
fa_database:
  class: Directory
  path: "/home/wotromp/InterTwin/FloodAdapt_database/Humber"
scenario: "Empty_Event_current_no_measures"
7 changes: 2 additions & 5 deletions DT_flood/workflows/cwl/run_sfincs_docker.cwl
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ requirements:

hints:
DockerRequirement:
dockerPull: deltares/sfincs-cpu:sfincs-v2.0.5
dockerPull: deltares/sfincs-cpu:sfincs-v2.1.1-Dollerup-Release

inputs:
sfincs_files:
Expand All @@ -19,7 +19,6 @@ inputs:
- Directory
- File

# stdout: sfincs.log

outputs:
sfincs_files_out:
Expand All @@ -29,6 +28,4 @@ outputs:
- Directory
- File
outputBinding:
glob: "*"
# sfincs_log:
# type: stdout
glob: "*"
10 changes: 10 additions & 0 deletions DT_flood/workflows/cwl/update_ra2ce_args.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Arguments for the RA2CE update step: the update script, its utils module,
# the FloodAdapt database directory, and the scenario name.
pyscript:
  class: File
  path: "../pyscripts/update_ra2ce.py"
# NOTE(review): machine-specific absolute path — adjust per deployment.
fa_database:
  class: Directory
  path: "/home/wotromp/InterTwin/FloodAdapt_database/Humber"
scenario: "Empty_Event_current_no_measures"
utils_script:
  class: File
  path: "../../utils/ra2ce_utils_docker.py"
3 changes: 1 addition & 2 deletions DT_flood/workflows/pyscripts/arrange_fa_folders.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,5 +6,4 @@
fa_database = Path(argv[2])
scenario = argv[3]

copytree(in_folder,(fa_database/ "output" / "Scenarios" / scenario / "Flooding" / "simulations" / in_folder.stem), dirs_exist_ok=True)
# in_folder.rename(fa_database/ "output" / "Scenarios" / scenario / "Flooding" / "simulations" / in_folder.stem)
copytree(in_folder,(fa_database/ "output" / "scenarios" / scenario / "Flooding" / "simulations" / in_folder.stem), dirs_exist_ok=True)
3 changes: 3 additions & 0 deletions DT_flood/workflows/pyscripts/init_fa_database.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
from sys import argv
from os import makedirs
from DT_flood.utils.fa_scenario_utils import init_scenario, create_scenario
from flood_adapt.object_model.utils import write_finished_file
from shutil import copytree, rmtree

print("Make extra copies of input folders")
Expand All @@ -20,3 +21,5 @@

print(f"Creating output folder at {new_scenario.results_path}")
makedirs(new_scenario.results_path)

write_finished_file(new_scenario.results_path)
11 changes: 11 additions & 0 deletions DT_flood/workflows/pyscripts/postprocess_fiat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
"""Run FloodAdapt's FIAT postprocessing for a finished scenario.

Usage: python postprocess_fiat.py <database_path> <scenario_name>
"""
from pathlib import Path
from sys import argv

from flood_adapt.api import scenarios
from DT_flood.utils.fa_scenario_utils import init_scenario

# argv[1]: FloodAdapt database path; argv[2]: scenario name whose config
# file is expected at "<scenario>_toplevel.toml" inside the database.
database, scenario_config = init_scenario(Path(argv[1]), (argv[2]+"_toplevel.toml"))

scenario = scenarios.get_scenario(scenario_config['name'])

# Delegate to FloodAdapt's built-in FIAT postprocessing step.
scenario.direct_impacts.postprocess_fiat()
Loading
Loading