From a8b8a79eee32a34f903f3a27dbb5e61d4efbbd43 Mon Sep 17 00:00:00 2001
From: yisangriB
Date: Mon, 23 Sep 2024 11:24:25 -0700
Subject: [PATCH 1/3] sy - fixing dakota for R2D and EE-UQ

---
 modules/performUQ/dakota/dakotaProcedures.cpp | 39 ++++++++++---------
 1 file changed, 20 insertions(+), 19 deletions(-)

diff --git a/modules/performUQ/dakota/dakotaProcedures.cpp b/modules/performUQ/dakota/dakotaProcedures.cpp
index be7538025..07758ec59 100755
--- a/modules/performUQ/dakota/dakotaProcedures.cpp
+++ b/modules/performUQ/dakota/dakotaProcedures.cpp
@@ -89,25 +89,26 @@ writeRV(std::ostream &dakotaFile, struct randomVariables &theRandomVariables, st
     dakotaFile << "\n\n";
   }

-  int numRealDiscrete = theRandomVariables.discreteDesignSetRVs.size();
-  if (numRealDiscrete > 0) {
-    dakotaFile << " discrete_design_set \n real = " << numRealDiscrete;
-    dakotaFile << "\n elements_per_variable = ";
-    for (auto it = theRandomVariables.discreteDesignSetRVs.begin(); it != theRandomVariables.discreteDesignSetRVs.end(); it++)
-      dakotaFile << it->elements.size() << " "; //std::list::iterator it;
-    dakotaFile << "\n elements = ";
-    for (auto it = theRandomVariables.discreteDesignSetRVs.begin(); it != theRandomVariables.discreteDesignSetRVs.end(); it++) {
-      it->elements.sort();
-      for (auto element = it->elements.begin(); element != it->elements.end(); element++)
-        dakotaFile << " \'" << *element << "\'";
-    }
-    dakotaFile << "\n descriptors = ";
-    for (auto it = theRandomVariables.discreteDesignSetRVs.begin(); it != theRandomVariables.discreteDesignSetRVs.end(); it++) {
-      dakotaFile << "\'" << it->name << "\' ";
-      rvList.push_back(it->name);
-    }
-    dakotaFile << "\n\n";
-  }
+  // sy - below is not needed. Both ground motion events and multiple FEM/EVT work without it
+  // int numRealDiscrete = theRandomVariables.discreteDesignSetRVs.size();
+  // if (numRealDiscrete > 0) {
+  //   dakotaFile << " discrete_design_set \n real = " << numRealDiscrete;
+  //   dakotaFile << "\n elements_per_variable = ";
+  //   for (auto it = theRandomVariables.discreteDesignSetRVs.begin(); it != theRandomVariables.discreteDesignSetRVs.end(); it++)
+  //     dakotaFile << it->elements.size() << " "; //std::list::iterator it;
+  //   dakotaFile << "\n elements = ";
+  //   for (auto it = theRandomVariables.discreteDesignSetRVs.begin(); it != theRandomVariables.discreteDesignSetRVs.end(); it++) {
+  //     it->elements.sort();
+  //     for (auto element = it->elements.begin(); element != it->elements.end(); element++)
+  //       dakotaFile << " \'" << *element << "\'";
+  //   }
+  //   dakotaFile << "\n descriptors = ";
+  //   for (auto it = theRandomVariables.discreteDesignSetRVs.begin(); it != theRandomVariables.discreteDesignSetRVs.end(); it++) {
+  //     dakotaFile << "\'" << it->name << "\' ";
+  //     rvList.push_back(it->name);
+  //   }
+  //   dakotaFile << "\n\n";
+  // }

From 3b5c3edd6e2773d956b7a6078a84ce1354e38f37 Mon Sep 17 00:00:00 2001
From: Amin
Date: Mon, 23 Sep 2024 16:38:00 -0700
Subject: [PATCH 2/3] adding shakermaker backend applications

---
 modules/tools/CMakeLists.txt                  |   1 +
 modules/tools/ShakerMaker/CMakeLists.txt      |   5 +
 .../tools/ShakerMaker/DatabaseMetadata.json   |  19 +
 .../tools/ShakerMaker/Examples/CMakeLists.txt |   4 +
 .../ShakerMaker/Examples/DRM_Sanramon.json    |  40 ++
 .../ShakerMaker/Examples/Single_Sanramon.json |  34 ++
 .../tools/ShakerMaker/ShakerMakersubmitjob.py | 169 ++++++++
 .../ShakerMaker/TapisFiles/CMakeLists.txt     |   3 +
 .../TapisFiles/ShakerMakermodel.py            | 350 ++++++++++++++++
 modules/tools/ShakerMaker/faultplotter.py     | 387 ++++++++++++++++++
 10 files changed,
1012 insertions(+) create mode 100644 modules/tools/ShakerMaker/CMakeLists.txt create mode 100644 modules/tools/ShakerMaker/DatabaseMetadata.json create mode 100644 modules/tools/ShakerMaker/Examples/CMakeLists.txt create mode 100644 modules/tools/ShakerMaker/Examples/DRM_Sanramon.json create mode 100644 modules/tools/ShakerMaker/Examples/Single_Sanramon.json create mode 100644 modules/tools/ShakerMaker/ShakerMakersubmitjob.py create mode 100644 modules/tools/ShakerMaker/TapisFiles/CMakeLists.txt create mode 100644 modules/tools/ShakerMaker/TapisFiles/ShakerMakermodel.py create mode 100644 modules/tools/ShakerMaker/faultplotter.py diff --git a/modules/tools/CMakeLists.txt b/modules/tools/CMakeLists.txt index 8bfe402f4..9d754e467 100644 --- a/modules/tools/CMakeLists.txt +++ b/modules/tools/CMakeLists.txt @@ -1,3 +1,4 @@ add_subdirectory(BRAILS) +add_subdirectory(ShakerMaker) diff --git a/modules/tools/ShakerMaker/CMakeLists.txt b/modules/tools/ShakerMaker/CMakeLists.txt new file mode 100644 index 000000000..7d99df06c --- /dev/null +++ b/modules/tools/ShakerMaker/CMakeLists.txt @@ -0,0 +1,5 @@ +add_subdirectory(Examples) +add_subdirectory(TapisFiles) +simcenter_add_python_script(SCRIPT faultplotter.py) +simcenter_add_python_script(SCRIPT ShakerMakersubmitjob.py) +simcenter_add_python_script(SCRIPT DatabaseMetadata.json) \ No newline at end of file diff --git a/modules/tools/ShakerMaker/DatabaseMetadata.json b/modules/tools/ShakerMaker/DatabaseMetadata.json new file mode 100644 index 000000000..9cfb274d3 --- /dev/null +++ b/modules/tools/ShakerMaker/DatabaseMetadata.json @@ -0,0 +1,19 @@ +{ + "Contries":[ + "Chile", + "United States" + ], + "Chile": { + "Faults":["Sanramon"], + "Sanramon": { + "Magnitudes": ["6.7"], + "6.7":{ + "Types": ["bl"], + "bl":{"Realizations": ["1","2","3","4","5","6","7","8","9","10"]} + } + } + }, + "United States": {} + + +} diff --git a/modules/tools/ShakerMaker/Examples/CMakeLists.txt b/modules/tools/ShakerMaker/Examples/CMakeLists.txt new file mode 100644 index 000000000..6a6f22dbe --- /dev/null +++ b/modules/tools/ShakerMaker/Examples/CMakeLists.txt @@ -0,0 +1,4 @@ +simcenter_add_python_script(SCRIPT DRM_Sanramon.json) +simcenter_add_python_script(SCRIPT Single_Sanramon.json) + + diff --git a/modules/tools/ShakerMaker/Examples/DRM_Sanramon.json b/modules/tools/ShakerMaker/Examples/DRM_Sanramon.json new file mode 100644 index 000000000..fbfba5157 --- /dev/null +++ b/modules/tools/ShakerMaker/Examples/DRM_Sanramon.json @@ -0,0 +1,40 @@ +{ + "dt": 0.005, + "nfft": 2048, + "dk": 0.2, + "tmin":0, + "tmax": 60, + "dh": 40.0, + "dv_rec": 5.0, + "dv_src": 200, + "layers":[ + {"Layer Name":"Layer1", "Thickness":0.20, "Vp":1.32, "Vs":0.75, "Density":2.4, "Qp": 1000, "Qs": 1000}, + {"Layer Name":"Layer2", "Thickness":0.80, "Vp":2.75, "Vs":1.57, "Density":2.5, "Qp": 1000, "Qs": 1000}, + {"Layer Name":"Layer3", "Thickness":14.5, "Vp":5.50, "Vs":3.14, "Density":2.5, "Qp": 1000, "Qs": 1000}, + {"Layer Name":"Half Space Layer","Thickness":0, "Vp":7.00, "Vs":4.00, "Density":2.67,"Qp": 1000, "Qs": 1000} + ], + "station_type": "DRM", + "station_info": { + "Latitude": -33.40657, + "Longitude": -70.50653, + "Width X": 105, + "Width Y": 105, + "Depth" : 30, + "Mesh Size X" : 2.5, + "Mesh Size Y" : 2.5, + "Mesh Size Z" : 2.5 + }, + "fault_type": "Database", + "fault_info": { + "Country": "Chile", + "Fault": "Sanramon", + "Magnitude": "6.7", + "Fault Type": "bl", + "Realization": "3" + }, + "System": "Frontera", + "Queue": "development", + "Number of Nodes": 40, + "Cores per Node": 
50,
+    "Max Run Time": "02:00:00"
+}
\ No newline at end of file
diff --git a/modules/tools/ShakerMaker/Examples/Single_Sanramon.json b/modules/tools/ShakerMaker/Examples/Single_Sanramon.json
new file mode 100644
index 000000000..9710b9659
--- /dev/null
+++ b/modules/tools/ShakerMaker/Examples/Single_Sanramon.json
@@ -0,0 +1,34 @@
+{
+    "dt": 0.1,
+    "nfft": 1024,
+    "dk": 0.1,
+    "tmin": 0,
+    "tmax": 10,
+    "dh": 0.1,
+    "dv_rec": 0.1,
+    "dv_src": 0.1,
+    "layers":[
+        {"Layer Name":"Layer1",          "Thickness":10, "Vp":2000, "Vs":1000, "Density":2000, "Qp": 100, "Qs": 50},
+        {"Layer Name":"Layer2",          "Thickness":20, "Vp":2500, "Vs":1200, "Density":2200, "Qp": 150, "Qs": 75},
+        {"Layer Name":"Layer3",          "Thickness":30, "Vp":3000, "Vs":1500, "Density":2400, "Qp": 200, "Qs": 100},
+        {"Layer Name":"Half Space Layer","Thickness":0,  "Vp":3500, "Vs":1800, "Density":2600, "Qp": 250, "Qs": 125}
+    ],
+    "station_type": "Single",
+    "station_info": [
+        {"Latitude": -33.5, "Longitude": -70.5, "Depth": 5},
+        {"Latitude": -33.6, "Longitude": -70.6, "Depth": 1}
+    ],
+    "fault_type": "Database",
+    "fault_info": {
+        "Country": "Chile",
+        "Fault": "Sanramon",
+        "Magnitude": "6.7",
+        "Fault Type": "bl",
+        "Realization": 1
+    },
+    "System": "Frontera",
+    "Queue": "normal",
+    "Number of Nodes": 40,
+    "Cores per Node": 50,
+    "Max Run Time": "04:00:00"
+}
\ No newline at end of file
diff --git a/modules/tools/ShakerMaker/ShakerMakersubmitjob.py b/modules/tools/ShakerMaker/ShakerMakersubmitjob.py
new file mode 100644
index 000000000..264a75047
--- /dev/null
+++ b/modules/tools/ShakerMaker/ShakerMakersubmitjob.py
@@ -0,0 +1,169 @@
+# %%
+from tapipy.tapis import Tapis
+import os
+import time
+import json
+import argparse
+
+# get the number of cores and the runtime from the argument parser
+parser = argparse.ArgumentParser(description='Submit a job to DesignSafe')
+parser.add_argument('--metadata', type=str, help='metadata file for submitting the job')
+parser.add_argument('--tapisfolder', type=str, help='folder to upload the files to')
+parser.add_argument('--username', type=str, help='username for DesignSafe')
+parser.add_argument('--password', type=str, help='password for DesignSafe')
+
+
+args = parser.parse_args()
+# load the metadata file
+with open(args.metadata + "/metadata.json") as json_file:
+    metadata = json.load(json_file)
+
+jobinfo = metadata['jobdata']
+maxminutes = jobinfo['maxruntime']
+corespernode = jobinfo['numCores']
+numnodes = jobinfo['numNodes']
+totalcores = str(corespernode*numnodes)
+queue = jobinfo['queue']
+
+
+# change hh:mm:ss to minutes
+maxminutes = int(maxminutes.split(":")[0]) * 60 + int(maxminutes.split(":")[1]) + int(maxminutes.split(":")[2]) / 60
+maxminutes = int(maxminutes)
+
+
+systemcodepath = args.tapisfolder
+faultinfo = metadata['faultdata']
+faultfiles = faultinfo["Faultfilenames"]
+
+filenames = []
+for filename in faultfiles:
+    # separate the filename from the whole path
+    filename = os.path.basename(filename)
+    filenames.append(filename)
+faultinfo["Faultfilenames"] = filenames
+
+
+# write the fault info in the faultinfo folder
+with open(args.metadata + "/faultInfo.json", "w") as outfile:
+    json.dump(faultinfo, outfile, indent=4)
+
+# write the metadata file
+metadata["faultdata_path"] = ""
+# delete the faultdata key
+del metadata["faultdata"]
+del metadata["jobdata"]
+
+
+with open(args.metadata + "/metadata.json", "w") as outfile:
+    json.dump(metadata, outfile, indent=4)
+
+# print("username: ", args.username)
+# print("password: ", args.password)
+# exit()
+
+print("Connecting to DesignSafe")
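+
+# For reference: the faultInfo.json written above is what the backend scripts
+# (ShakerMakermodel.py, faultplotter.py) read. A plausible minimal shape,
+# inferred from the keys those readers access -- every value below is an
+# illustrative placeholder, not data taken from the fault database:
+#
+#   {
+#       "name": "Sanramon",
+#       "latitude": -33.40657,
+#       "longitude": -70.50653,
+#       "M0": 1.0e19,
+#       "xmean": 0.0,
+#       "ymean": 0.0,
+#       "epsg": "epsg:32719",
+#       "Faultfilenames": ["sources_1.json"]
+#   }
+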
+t = Tapis(base_url="https://designsafe.tapis.io",
+          username=args.username,
+          password=args.password)
+
+# Call to Tokens API to get access token
+t.get_tokens()
+
+# %%
+# =============================================================================
+# uploading files
+# =============================================================================
+print("Uploading files to DesignSafe")
+# create the Shakermaker directory if it does not exist
+codepath = f"{t.username}/Shakermaker"
+t.files.mkdir(systemId="designsafe.storage.default", path=codepath)
+for filename in os.listdir(systemcodepath):
+    filedata = open(f"{systemcodepath}/{filename}", "r").read()
+    t.files.insert(systemId="designsafe.storage.default", path=f"{codepath}/{filename}", file=filedata)
+for filename in faultfiles:
+    filedata = open(filename, "r").read()
+    t.files.insert(systemId="designsafe.storage.default", path=f"{codepath}/{os.path.basename(filename)}", file=filedata)
+# upload the source time function
+t.files.insert(systemId="designsafe.storage.default", path=f"{codepath}/SourceTimeFunction.py", file=open(f"{args.metadata}fault/SourceTimeFunction.py", "r").read())
+t.files.insert(systemId="designsafe.storage.default", path=f"{codepath}/metadata.json", file=open(args.metadata + "metadata.json", "r").read())
+t.files.insert(systemId="designsafe.storage.default", path=f"{codepath}/faultInfo.json", file=open(args.metadata + "faultInfo.json", "r").read())
+
+# =============================================================================
+# submit a Tapis job
+# =============================================================================
+print("Submitting a job to DesignSafe")
+day = time.strftime("%Y_%m_%d")
+time_ = time.strftime("%H_%M_%S")
+# attach the date and time to the username
+jobname = f"EEUQ_DesignSafe_{t.username}_{day}_{time_}"
+archivepath = f"{t.username}/tapis-jobs-archive/{jobname}"
+urls = [
+    f"tapis://designsafe.storage.default/{t.username}/Shakermaker/ShakerMakermodel.py",
+    f"tapis://designsafe.storage.default/{t.username}/Shakermaker/metadata.json",
+    f"tapis://designsafe.storage.default/{t.username}/Shakermaker/faultInfo.json",
+    f"tapis://designsafe.storage.default/{t.username}/Shakermaker/SourceTimeFunction.py"
+]
+for filename in filenames:
+    urls.append(f"tapis://designsafe.storage.default/{t.username}/Shakermaker/{filename}")
+
+my_job = {
+    "name": jobname,
+    "appId": "Shakermaker-app-amnp95",
+    "appVersion": "1.0.0",
+    "jobType": "BATCH",
+    "execSystemId": "frontera",
+    "execSystemExecDir": "${JobWorkingDir}/jobs/",
+    "execSystemInputDir": "${JobWorkingDir}/inputs/",
+    "execSystemOutputDir": "${JobWorkingDir}/inputs/results/",
+    "archiveSystemId": "designsafe.storage.default",
+    "archiveSystemDir": archivepath,
+    "nodeCount": numnodes,
+    "coresPerNode": corespernode,
+    "maxMinutes": maxminutes,
+    "execSystemLogicalQueue": queue,
+    "archiveOnAppError": False,
+    "fileInputArrays": [{"sourceUrls": urls, "targetDir": "*"}],
+    "parameterSet": {
+        "schedulerOptions": [{"arg": "-A DesignSafe-SimCenter"}],
+        "envVariables": [
+            {"key": "inputFile", "value": "ShakerMakermodel.py"},
+            {"key": "numProcessors", "value": totalcores}
+        ]
+    }
+}
+job_info = t.jobs.submitJob(**my_job)
+# =============================================================================
+# get the job status
+# =============================================================================
+jobs = t.jobs.getJobList(limit=1, orderBy='lastUpdated(desc),name(asc)', computeTotal=True)
+job_info = jobs[0]
+uuid = job_info.uuid
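+
+# Note: the submitJob response above already carries the job uuid, so the
+# getJobList round-trip is only a convenience. The polling loop below stops as
+# soon as the job is queued or running; a helper that instead blocks until a
+# terminal state could look like this (a sketch reusing the same tapipy calls
+# as this script; it is not invoked here):
+def wait_until_done(client, job_uuid, poll_seconds=10.0):
+    terminal = ('FINISHED', 'FAILED', 'CANCELLED')
+    status = client.jobs.getJob(jobUuid=job_uuid).status
+    while status not in terminal:
+        time.sleep(poll_seconds)
+        status = client.jobs.getJob(jobUuid=job_uuid).status
+    return status
+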
+# get the status of the job every 10 seconds
+job_info = t.jobs.getJob(jobUuid=uuid)
+previous_status = job_info.status
+while job_info.status not in ['FINISHED', 'FAILED', 'CANCELLED', 'QUEUED', 'RUNNING']:
+    job_info = t.jobs.getJob(jobUuid=uuid)
+    if job_info.status != previous_status:
+        print(f"Job status: {job_info.status}")
+        previous_status = job_info.status
+    time.sleep(10.0)
+
+if job_info.status == 'FINISHED':
+    print("Job finished successfully")
+if job_info.status == 'FAILED':
+    print("Job failed")
+    print("please check the job logs and contact the developers")
+if job_info.status == 'CANCELLED':
+    print("Job cancelled")
+if job_info.status == 'QUEUED':
+    print("Job is submitted and is in the queue")
+    print("This can take several days depending on the queue")
+    print("please wait for the job to finish")
+    print("you can check the job status through the DesignSafe portal")
+if job_info.status == 'RUNNING':
+    print("Job is running")
+    print("This can take several hours")
+    print("please wait for the job to finish")
+    print("you can check the job status through the DesignSafe portal")
+# %%
+
diff --git a/modules/tools/ShakerMaker/TapisFiles/CMakeLists.txt b/modules/tools/ShakerMaker/TapisFiles/CMakeLists.txt
new file mode 100644
index 000000000..106dcc4c1
--- /dev/null
+++ b/modules/tools/ShakerMaker/TapisFiles/CMakeLists.txt
@@ -0,0 +1,3 @@
+simcenter_add_python_script(SCRIPT ShakerMakermodel.py)
+
+
diff --git a/modules/tools/ShakerMaker/TapisFiles/ShakerMakermodel.py b/modules/tools/ShakerMaker/TapisFiles/ShakerMakermodel.py
new file mode 100644
index 000000000..3f32f2c5e
--- /dev/null
+++ b/modules/tools/ShakerMaker/TapisFiles/ShakerMakermodel.py
@@ -0,0 +1,350 @@
+# %%
+#############################################################
+# This is the ShakerMaker model created for running         #
+# simulations for the EE-UQ app.                            #
+# This code was created by Amin Pakzad and Pedro Arduino,   #
+# based on the initial code by Jose Abell and Jorge         #
+# Crempien                                                  #
+# date = September 2024                                     #
+# ###########################################################
+
+import os
+import json
+from geopy.distance import geodesic
+from shakermaker import shakermaker
+from shakermaker.crustmodel import CrustModel
+from shakermaker.pointsource import PointSource
+from shakermaker.faultsource import FaultSource
+from shakermaker.slw_extensions import DRMHDF5StationListWriter
+from shakermaker.sl_extensions import DRMBox
+from shakermaker.station import Station
+from shakermaker.stationlist import StationList
+from mpi4py import MPI
+
+def calculate_distances_with_direction(lat1, lon1, lat2, lon2):
+    # Original point
+    point1 = (lat1, lon1)
+
+    # Calculate north-south distance (latitude changes, longitude constant)
+    north_point = (lat2, lon1)
+    north_south_distance = geodesic(point1, north_point).kilometers
+    north_south_direction = "north" if lat2 > lat1 else "south"
+
+    # Calculate east-west distance (longitude changes, latitude constant)
+    west_point = (lat1, lon2)
+    west_east_distance = geodesic(point1, west_point).kilometers
+    west_east_direction = "east" if lon2 > lon1 else "west"
+
+    # south is negative
+    north_south_distance = -north_south_distance if north_south_direction == "south" else north_south_distance
+    # west is negative
+    west_east_distance = -west_east_distance if west_east_direction == "west" else west_east_distance
+
+    return north_south_distance, west_east_distance
+
+# ======================================================================================
+# Code initialization
+# ======================================================================================
+comm = MPI.COMM_WORLD
+rank = comm.Get_rank()
+nprocs = comm.Get_size()
+
+# read the metadata file
+metadata_file = "metadata.json"
+with open(metadata_file) as f:
+    metadata = json.load(f)
+
+# make the results directory if it doesn't exist
+results_dir = "results"
+if rank == 0:
+    if not os.path.exists("results"):
+        os.makedirs("results")
+comm.barrier()
+# Define the source parameters
+# For estimating wave arrival windows,
+# assume the following maximum and
+# minimum propagation velocities
+Vs_min = 3.14  # Minimum shear wave velocity
+Vp_max = 8.00  # Maximum primary wave velocity
+MINSLIP = 0.0  # Minimum slip for the fault
+
+# ======================================================================================
+# Shakermaker configuration
+# ======================================================================================
+
+_m = 0.001                               # meters (ShakerMaker uses kilometers)
+dt = metadata["analysisdata"]["dt"]      # Time step
+nfft = metadata["analysisdata"]["nfft"]  # Number of samples in the record
+dk = metadata["analysisdata"]["dk"]      # (Wavelength space discretization) adjust using theory
+tb = 0                                   # How much to "advance" the simulation window...
no advance +tmin = metadata["analysisdata"]["tmin"] # Time when the final results start +tmax = metadata["analysisdata"]["tmax"] # Time when the final results end +delta_h = metadata["analysisdata"]["dh"]*_m # Horizontal distance increment +delta_v_rec = metadata["analysisdata"]["delta_v_rec"]*_m # Vertical distance increment for receivers +delta_v_src = metadata["analysisdata"]["delta_v_src"]*_m # Vertical distance increment for sources + +# nfft = 2048 # Number of samples in the record +# dk = 0.2 # (Wavelength space discretization) adjust using theory +# tb = 0 # How much to "advance" the simulation window... no advance +# tmin = 0. # Time when the final results start +# tmax = 45. # Time when the final results end +# delta_h = 40*_m # Horizontal distance increment +# delta_v_rec = 5.0*_m # Vertical distance increment for receivers +# delta_v_src = 200*_m # Vertical distance increment for sources + + +# options for the simulation +npairs_max = 200000 +allow_out_of_bounds = False + + +# ====================================================================================== +# Loading the crust model +# ====================================================================================== +layers = metadata["crustdata"] +num_layers = len(layers) +CRUST = CrustModel(num_layers) +for layer in layers: + name = layer['name'] + vp = layer['vp'] + vs = layer['vs'] + rho = layer['rho'] + thickness = layer['thick'] + Qa = layer['Qa'] + Qb = layer['Qb'] + CRUST.add_layer(thickness, vp, vs, rho, Qa, Qb) + +# ====================================================================================== +# Loading the fault +# ====================================================================================== +# faultpath = metadata["faultdata_path"] + +# make Fault Directory +fault_dir = "fault" + +# if rank == 0: +# # remove the directory if it exists +# if os.path.exists(fault_dir): +# if os.name == 'nt': +# os.system(f'rmdir /S /Q "{fault_dir}"') +# else: +# os.system(f'rm -rf "{fault_dir}"') + +# if not os.path.exists(fault_dir): +# os.makedirs(fault_dir) + +comm.barrier() + +# copy the fault files to the fault directory +# src = faultpath +# dst = fault_dir +# # windows, linux or mac +# if os.name == 'nt': # Windows +# cmd = 'copy' +# if os.name == 'posix': # Unix/Linux +# cmd = 'cp' +# if os.name == 'mac': # Mac +# cmd = 'cp' + +# if rank == 0: +# # copy the SourceTimeFunction.py file to the fault directory +# os.system(f'{cmd} "{src}/SourceTimeFunction.py" "{dst}"') + +# # copy faultInfo.json to the fault directory +# os.system(f'{cmd} "{src}/faultInfo.json" "{dst}"') + +# wait for all processes to finish +comm.barrier() + +# load the faultInfo.json file into faultdata +with open(f"faultInfo.json") as f: + faultdata = json.load(f) + + +faultLat = faultdata["latitude"] +faultLon = faultdata["longitude"] +filenames = faultdata["Faultfilenames"] +M0 = faultdata["M0"] +faultName = faultdata["name"] +xmean = faultdata["xmean"] +ymean = faultdata["ymean"] + +# if rank == 0: +# for filename in filenames: +# os.system(f'{cmd} "{src}/{filename}" "{dst}"') + +# # check that SourceTimeFunction is in the fault file +# files = os.listdir(fault_dir) +# if "SourceTimeFunction.py" not in files: +# raise ValueError("SourceTimeFunction.py not found in the fault file") + + +# wait for all processes to finish +comm.barrier() + +# import SourceTimeFunction from fault file +from SourceTimeFunction import source_time_function + + +for filename in filenames: + sources = [] + + # read the json fault file + f = 
open(f"{filename}") + faultsources = json.load(f) + + for source in faultsources: + xsource = source["x"] - xmean + ysource = source["y"] - ymean + zsource = source["z"] + strike = source["strike"] + dip = source["dip"] + rake = source["rake"] + t0 = source["t0"] + stf = source["stf"] + stf_type = stf["type"] + params = stf["parameters"] + numparams = stf["numParameters"] + stf_func = source_time_function(*params) + sources.append(PointSource([xsource, ysource, zsource], [strike,dip,rake], tt=t0, stf=stf_func)) + f.close() +FAULT = FaultSource(sources, + metadata = { + "name": f"{faultName} M0={M0}" + }) +# ====================================================================================== +# Loading the stations +# ====================================================================================== +stationsType = metadata["stationdata"]["stationType"] +# single station +if stationsType.lower() in ["singlestation" ,"single"]: + stationslist = [] + for station in metadata["stationdata"]["Singlestations"]: + stationLat = station["latitude"] + stationLon = station["longitude"] + stationDepth = station["depth"] + meta = station["metadata"] + xstation, ystation = calculate_distances_with_direction(faultLat, faultLon, stationLat, stationLon) + stationslist.append(Station([xstation, ystation, stationDepth], metadata=meta)) + + meta = {"name": metadata["stationdata"]["name"]} + STATIONS = StationList(stationslist,metadata=meta) + +elif stationsType.lower() in ["drmbox", "drm", "drm box", "drm_box","drm station"]: + DRMdata = metadata["stationdata"]["DRMbox"] + name = DRMdata["name"] + latitude = DRMdata["latitude"] + longitude = DRMdata["longitude"] + depth = DRMdata["Depth"] + Lx = DRMdata["Width X"] + Ly = DRMdata["Width Y"] + Lz = DRMdata["Depth"] + dx = DRMdata["Mesh Size X"] + dy = DRMdata["Mesh Size Y"] + dz = DRMdata["Mesh Size Z"] + nx, ny, nz = int(Lx/dx), int(Ly/dy), int(Lz/dz) + dx = dx * _m; dy = dy * _m; dz = dz * _m + Lx = Lx * _m; Ly = Ly * _m; Lz = Lz * _m + xstation, ystation = calculate_distances_with_direction(faultLat, faultLon, latitude, longitude) + STATIONS = DRMBox([xstation, ystation, 0], [nx, ny, nz], [dx, dy, dz], metadata={"name": name}) +else: + raise ValueError(f"Unknown station type: {stationsType}") + + +# ====================================================================================== +# Create the shakermaker model +# ====================================================================================== +model = shakermaker.ShakerMaker(CRUST, FAULT, STATIONS) + +if stationsType.lower() in ["drmbox", "drm", "drm box", "drm_box","drm station"]: + # creating the pairs + model.gen_greens_function_database_pairs( + dt=dt, # Output time-step + nfft=nfft, # N timesteps + dk=dk, # wavenumber discretization + tb=tb, # Initial zero-padding + tmin=tmin, + tmax=tmax, + smth=1, + sigma=2, + verbose=True, + debugMPI=False, + showProgress=True, + store_here="results/greensfunctions_database", + delta_h=delta_h, + delta_v_rec=delta_v_rec, + delta_v_src=delta_v_src, + npairs_max=npairs_max, + using_vectorize_manner=True, + cfactor=0.5 + ) + + # wait for all processes to finish + comm.barrier() + model.run_create_greens_function_database( + h5_database_name="results/greensfunctions_database", + dt=dt, # Output time-step + nfft=nfft, # N timesteps + dk=dk, # wavenumber discretization + tb=tb, # Initial zero-padding + tmin=tmin, + tmax=tmax, + smth=1, + sigma=2, + verbose=False, + debugMPI=False, + showProgress=True, + ) + + # wait for all processes to finish + 
comm.barrier() + writer = DRMHDF5StationListWriter("results/DRMLoad.h5drm") + model.run_faster( + h5_database_name="results/greensfunctions_database", + dt=dt, # Output time-step + nfft=nfft, # N timesteps + dk=dk, # wavenumber discretization + tb=tb, # Initial zero-padding + tmin=tmin, + tmax=tmax, + smth=1, + sigma=2, + verbose=False, + debugMPI=False, + showProgress=True, + writer=writer, + delta_h=delta_h, + delta_v_rec=delta_v_rec, + delta_v_src=delta_v_src, + allow_out_of_bounds=allow_out_of_bounds, + ) + + +# single station +if stationsType.lower() in ["singlestation" ,"single"]: + model.run( + dt=dt, # Output time-step + nfft=nfft, # N timesteps + dk=dk, # wavenumber discretization + tb=tb, # Initial zero-padding + tmin=tmin, + tmax=tmax, + smth=1, + sigma=2, + verbose=False, + debugMPI=False, + showProgress=True, + ) + if rank == 0: + for i, s in enumerate(stationslist): + output_filename = f"results/station{i+1}.npz" + s.save(output_filename) + + + + + + + + + + diff --git a/modules/tools/ShakerMaker/faultplotter.py b/modules/tools/ShakerMaker/faultplotter.py new file mode 100644 index 000000000..05b81444e --- /dev/null +++ b/modules/tools/ShakerMaker/faultplotter.py @@ -0,0 +1,387 @@ +# %% +import numpy as np +import matplotlib.pyplot as plt +import json +import os +from scipy.integrate import trapezoid +import pyvista as pv +from geopy.distance import geodesic +import argparse +import importlib.util +from pyproj import Transformer +import geopandas as gpd +from shapely.geometry import Point +import pandas as pd +import plotly.express as px + +def load_function_from_file(filepath, function_name): + # Load the module from the given file path + spec = importlib.util.spec_from_file_location("module.name", filepath) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + + # Get the function from the module + return getattr(module, function_name) + + +def calculate_distances_with_direction(lat1, lon1, lat2, lon2): + # Original points + point1 = (lat1, lon1) + point2 = (lat2, lon2) + + # Calculate north-south distance (latitude changes, longitude constant) + north_point = (lat2, lon1) + north_south_distance = geodesic(point1, north_point).kilometers + north_south_direction = "north" if lat2 > lat1 else "south" + + # Calculate east-west distance (longitude changes, latitude constant) + west_point = (lat1, lon2) + west_east_distance = geodesic(point1, west_point).kilometers + west_east_direction = "east" if lon2 > lon1 else "west" + + # south is negative + north_south_distance = -north_south_distance if north_south_direction == "south" else north_south_distance + # west is negative + west_east_distance = -west_east_distance if west_east_direction == "west" else west_east_distance + + return north_south_distance, west_east_distance + + +def PlotSources(info): + + tmpLocation = info["tmpLocation"] + faultinfopath = tmpLocation + "/fault/faultInfo.json" + faultinfo = json.load(open(faultinfopath,"r")) + + # // load the source time fuction in the faultinfo path + sourcetimeinfopath = tmpLocation + "/fault/SourceTimeFunction.py" + # // load the source time function + source_time_function = load_function_from_file(sourcetimeinfopath, "source_time_function") + + + numFaults = len(faultinfo["Faultfilenames"]) + faultFiles = faultinfo["Faultfilenames"] + + xfault = faultinfo["xmean"] + yfault = faultinfo["ymean"] + + # filteringData = info["filteringData"] + # dataType = info["dataType"] + # filteringRange = info["filteringRange"] + + if numFaults < 1: + print("No 
faults to plot") + return + + if numFaults != len(faultFiles): + print("Number of files does not match number of faults") + return + + meshlist = [] + + for i in range(numFaults): + faultFile = tmpLocation + "/fault/" + faultFiles[i] + sources = json.load(open(faultFile,"r")) + x = np.zeros(len(sources)) + y = np.zeros(len(sources)) + z = np.zeros(len(sources)) + c1 = np.zeros(len(sources)) + c2 = np.zeros(len(sources)) + c3 = np.zeros(len(sources)) + c4 = np.zeros(len(sources)) + c5 = np.zeros(len(sources)) + for j,source in enumerate(sources): + + sourcetype = source["stf"]["type"] + x[j] = source["x"] + y[j] = source["y"] + z[j] = source["z"] + c1[j] = source["strike"] + c2[j] = source["dip"] + c3[j] = source["rake"] + c4[j] = source["t0"] + parameters = source["stf"]["parameters"] + c5[j] = source_time_function(*parameters, get_slip=True) + + + + + mesh = pv.PolyData(np.c_[x,y,z]) + mesh["strike"] = c1 + mesh["dip"] = c2 + mesh["rake"] = c3 + mesh["t0"] = c4 + mesh["slip"] = c5 + # strikes_rad = np.radians(c1) + # dips_rad = np.radians(c2) + # rakes_rad = np.radians(c3) + # # compute the vectors + # s = np.array([-np.sin(strikes_rad), np.cos(strikes_rad), np.zeros_like(strikes_rad)]) + # d = np.array([ + # np.cos(strikes_rad) * np.sin(dips_rad), + # np.sin(strikes_rad) * np.sin(dips_rad), + # -np.cos(dips_rad) + # ]) + # # Compute the rake vectors + # r = np.cos(rakes_rad) * s + np.sin(rakes_rad) * d + # # Normalize the rake vectors + # r_magnitudes = np.linalg.norm(r, axis=0) + # r_normalized = r / r_magnitudes + # mesh["rake_vector"] = r_normalized.T + + meshlist.append(mesh) + + + + + + Mesh = meshlist[0] + for i in range(1,numFaults): + Mesh = Mesh.merge(meshlist[i]) + + xy = Mesh.points[:,:2] + Mesh.points = Mesh.points- np.array([xfault,yfault,0]) + + + # finding hypocenter + hypocenterid = np.argmin(c4) + hypocenter = Mesh.points[hypocenterid].copy() + # off screen rendering + # remote_rendering + pl = pv.Plotter(shape=(3,2),groups=[((0, np.s_[:]))]) + pl.subplot(0,0) + # without scalars + Mesh.set_active_scalars(None) + pl.add_mesh(Mesh,color="blue") + + + # plotting the hypocenter + zhead = 0 + ztail = hypocenter[2] + # create an arrow + arrow = pv.Arrow(start=(hypocenter[0],hypocenter[1],ztail), direction=[0,0,1],scale = (zhead-ztail)) + pl.add_mesh(arrow, color="black",label="Hypocenter") + # pl.add_mesh(pv.PolyData(hypocenter),color="black",point_size=20,render_points_as_spheres=True,label="Hypocenter") + + # pl.subplot(1) + # # strike + # pl.add_mesh(Mesh.copy(), scalars="strike", cmap="viridis",label=f"Fault {info['faultname']}") + + pl.subplot(1,0) + # dip + pl.add_mesh(Mesh.copy(), scalars="dip", cmap="viridis") + + pl.subplot(1,1) + # rake + pl.add_mesh(Mesh.copy(), scalars="rake", cmap="viridis") + # Mesh2 = Mesh.copy() + # arrows = Mesh2.glyph(orient="rake_vector",scale="slip") + # arrows = Mesh2.glyph(orient="rake_vector",scale="slip") + + # pl.add_mesh(arrows, color="black") + # computinf the vectors of the rake using slip and strike + + pl.subplot(2,0) + # t0 + Mesh.set_active_scalars("t0") + pl.add_mesh(Mesh.copy(), scalars="t0", cmap="viridis") + + pl.subplot(2,1) + # slip + Mesh.set_active_scalars("slip") + pl.add_mesh(Mesh.copy(), scalars="slip", cmap="viridis") + + + # get the bounds + xmin, xmax, ymin, ymax, zmin, zmax = Mesh.bounds + + + + if info["plottingStation"].lower() in ["yes","true"]: + stationCoordinates = info["stationCoordinates"] + coords = [] + xy2 = [] + for coord in stationCoordinates: + north, east = 
calculate_distances_with_direction(faultinfo["latitude"],faultinfo["longitude"],coord[0],coord[1]) + coords.append([north,east,coord[2]]) + xy2.append([north + xfault ,east + yfault]) + + + stations = pv.PolyData(np.array(coords)) + + xmin0, xmax0, ymin0, ymax0, zmin0, zmax0 = stations.bounds + # update extreme values + xmin = min(xmin,xmin0) + xmax = max(xmax,xmax0) + ymin = min(ymin,ymin0) + ymax = max(ymax,ymax0) + zmin = min(zmin,zmin0) + zmax = max(zmax,zmax0) + + colors = plt.rcParams['axes.prop_cycle'].by_key()['color'] + + pl.subplot(0,0) + # if info["plottingStation"].lower() in ["yes","true"]: + # pl.add_mesh(stations, color="red", point_size=10, render_points_as_spheres=True, label="Stations") + + + if info["plotlayers"].lower() in ["yes","true"]: + thickness = info["thickness"] + # insert zero at the begining + thickness.insert(0,0) + # cummulative thickness + cummulative = np.cumsum(thickness) + if zmax < cummulative[-1]: + zamx = cummulative[-1] + cummulative[-1]*0.8 + cummulative[-1] = zmax+ cummulative[-1]*0.8 + for j in range(len(thickness)-1): + pl.add_mesh(pv.Cube(bounds=[xmin,xmax,ymin,ymax,cummulative[j],cummulative[j+1]]),color=colors[j],opacity=0.2,label=f"Layer {j+1}") + + + + + if not os.path.exists(tmpLocation): + os.makedirs(tmpLocation) + pl.link_views() + pl.show_grid(xtitle='X (North)',ytitle='Y (East)', ztitle='Z (Depth)') + pl.add_axes() + pl.camera.up = (0,0,-1) + pl.camera.elevation = 60 + pl.camera.azimuth = -90 + pl.reset_camera() + pl.camera.view_frustum(1.0) + pl.add_legend() + pl.export_html(f"{tmpLocation}/faults.html") + + transformer = Transformer.from_crs(faultinfo["epsg"], "epsg:4326") + xy = xy *1000 + # add (xfault,yfault) to the first row of xy + xy = np.vstack((xy,[xfault,yfault])) + x2,y2 = transformer.transform(xy[:,1], xy[:,0]) + geometry1 = [Point(xy) for xy in zip(x2,y2)] + gdf1 = gpd.GeoDataFrame(geometry=geometry1) + gdf1["type"] = "Fault" + gdf1.loc[gdf1.index[-1], "type"] = "Fault Center" + + if info["plottingStation"].lower() in ["yes","true"]: + xy2 = np.array(xy2) * 1000 + x2,y2 = transformer.transform(xy2[:,1], xy2[:,0]) + geometry2 = [Point(xy) for xy in zip(x2,y2)] + gdf2 = gpd.GeoDataFrame(geometry=geometry2) + gdf2["type"] = "Station" + # make the first row of the type column of gdf2 to be "Fault Center" + + + gdf = gpd.GeoDataFrame(pd.concat([gdf1,gdf2],ignore_index=True)) + # plot the data plotly + else: + gdf = gdf1 + + # use different colors for fault and stations + color_map = { + "Fault": "blue", + "Station": "red", + "Fault Center": "green" + } + gdf['size'] = gdf['type'].apply(lambda x: 15 if x != 'Fault' else 7) + # Plot with custom colors + fig = px.scatter_mapbox( + gdf, + lat=gdf.geometry.x, + lon=gdf.geometry.y, + color=gdf["type"], + size=gdf["size"], + color_discrete_map=color_map # Apply the custom color map + ) + min_lat, max_lat = gdf.geometry.x.min(), gdf.geometry.x.max() + min_lon, max_lon = gdf.geometry.y.min(), gdf.geometry.y.max() + # Update layout and display map + fig.update_layout( + mapbox_style="open-street-map", + mapbox_zoom=10, # Initial zoom level (adjustable) + # mapbox_center={"lat": (min_lat + max_lat) / 2, "lon": (min_lon + max_lon) / 2}, # Center the map + # mapbox_bounds={"west": min_lon, "east": max_lon, "south": min_lat, "north": max_lat} # Set bounds + ) + # export the as html + fig.write_html(f"{tmpLocation}/faults_map.html") + + + + + +if __name__ == "__main__": + info = { + "filteringData": "No", + "filteringRange": [25, 45], + "numsataions": 1, + "stationCoordinates": [[-33.482426, 
-70.505214, 0]],
+        "plottingStation": "No",
+        "plotlayers": "No",
+        "thickness": [0.2, 0.8, 14.5, 0],
+        "tmpLocation": "tmp",
+    }
+
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--tmplocation", help="output location for DRM", required=True)
+    parser.add_argument("--thickness", help="Thickness of layers", required=False)
+    parser.add_argument("--plotlayers", help="Plot layers", required=False)
+    parser.add_argument("--plotstations", help="Plot stations", required=False)
+    parser.add_argument("--numsataions", help="Number of stations", required=False)
+    parser.add_argument("--stationcoordinates", help="Station coordinates", required=False)
+
+    # print input arguments
+
+
+    args = parser.parse_args()
+
+    if args.tmplocation:
+        info["tmpLocation"] = args.tmplocation
+
+    if args.thickness:
+        info["thickness"] = [float(i) for i in args.thickness.split(";")[:-1]]
+
+    if args.plotlayers:
+        info["plotlayers"] = args.plotlayers
+
+    if args.plotstations:
+        info["plottingStation"] = args.plotstations
+
+    if args.numsataions:
+        info["numstations"] = int(args.numsataions)
+    else:
+        # fall back to the default above (its key is spelled "numsataions"),
+        # so the station-count check below cannot raise a KeyError
+        info["numstations"] = info.get("numsataions", 1)
+
+    if info["plottingStation"].lower() in ["yes","true"] and args.stationcoordinates:
+        # copy the raw coordinate string before parsing it
+        stringcoords = args.stationcoordinates[:]
+        info["stationCoordinates"] = [[float(i) for i in j.split(",")] for j in stringcoords.split(";")[:-1]]
+
+    if info["plottingStation"].lower() in ["yes","true"] and not args.stationcoordinates:
+        print("Station coordinates are required")
+        exit()
+
+    if info["plottingStation"].lower() in ["yes","true"] and len(info["stationCoordinates"]) != info["numstations"]:
+        print("Number of stations does not match number of coordinates")
+        exit()
+
+    if info["plotlayers"].lower() == "yes" and not args.thickness:
+        print("Thickness of layers is required")
+        exit()
+
+    if info["plotlayers"].lower() == "yes" and len(info["thickness"]) < 1:
+        print("At least one layer is required")
+        exit()
+
+
+    PlotSources(info)
+
+

From 2c6bf4173c8a0b714d2d3c1f57115a23ed48a3c6 Mon Sep 17 00:00:00 2001
From: yisangriB
Date: Thu, 26 Sep 2024 12:07:43 -0700
Subject: [PATCH 3/3] sy - fixes for continuous integration and printing errors
 in case it wasn't fixed.
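
This patch wraps the old main() in createDriver(), redirects stderr to
FEMpreprocessor.err, and installs a SIGSEGV handler, so that a crash in the
preprocessor still leaves a readable error file for CI. Reduced to a minimal
sketch (illustrative only; the real file name and handler are in the diff
below):

    #include <csignal>
    #include <cstdio>
    #include <cstdlib>
    #include <iostream>

    void onSignal(int sig) {
        std::cerr << "Caught signal: " << sig << std::endl;
        // strictly, only async-signal-safe calls belong in a handler;
        // std::_Exit(sig) would be the conservative choice over exit()
        std::exit(sig);
    }

    int main(int argc, const char **argv) {
        std::freopen("FEMpreprocessor.err", "w", stderr); // collect stderr in a file CI can archive
        std::signal(SIGSEGV, onSignal);
        // ... run the real driver logic here, catching std::exception ...
        return 0;
    }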
---
 .../OpenSeesPy/createOpenSeesPyDriver.cpp     | 68 +++++++++++++++++--
 1 file changed, 61 insertions(+), 7 deletions(-)

diff --git a/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp b/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp
index fb492e976..f32fa82a8 100644
--- a/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp
+++ b/modules/performFEM/OpenSeesPy/createOpenSeesPyDriver.cpp
@@ -10,6 +10,13 @@
 #include
 #include
 #include
+#include <cstdio>   // for freopen
+#include <cstdlib>  // for exit
+#include <csignal>
+#include
+#include
+
+
 int getEDP(json_t *edp, std::vector &edpList);
 int getRV(json_t *edp, std::vector &rvList);
@@ -33,7 +40,8 @@ std::string appendModelIndexToStem(int modelIndex, std::string filename) {
     return filename;
 }
 
-int main(int argc, const char **argv) {
+//int main(int argc, const char **argv) {
+int createDriver(int argc, const char **argv) {
 
   if (argc < 5) {
     std::cerr << "createOpenSeesPyDriver:: expecting 4 inputs\n";
@@ -197,7 +205,21 @@ int main(int argc, const char **argv) {
       if (modelIndex > 0) {
         parametersScriptTemplate = appendModelIndexToStem(modelIndex, parametersScriptTemplate);
       }
-      std::filesystem::copy_file(parametersScript, parametersScriptTemplate);
+      try {
+        std::filesystem::copy_file(parametersScript, parametersScriptTemplate);
+        std::cout << "File copied successfully from "
+                  << parametersScript << " to "
+                  << parametersScriptTemplate << std::endl;
+      } catch (const std::filesystem::filesystem_error& e) {
+        std::cerr << "Error copying file: " << e.what() << std::endl;
+        std::cerr << "Source: " << e.path1() << ", Destination: " << e.path2() << std::endl;
+        exit(-1);
+      } catch (const std::exception& e) {
+        std::cerr << "An unexpected error occurred: " << e.what() << std::endl;
+        exit(-1);
+      }
+
+
     } else {
       for(std::vector::iterator itRV = rvList.begin(); itRV != rvList.end(); ++itRV) {
         std::string nm = *itRV;
@@ -218,14 +240,14 @@ int main(int argc, const char **argv) {
   }
   std::string mainScript = json_string_value(femScript);
   std::ifstream modelFile(mainScript);
-  while (!modelFile.eof()) {
-    std::string line;
-    std::getline(modelFile, line);
-    templateFile << line << std::endl;
+  std::string line;
+  while (std::getline(modelFile, line)) {
+    std::cout << line << std::endl;     // Print line to console
+    templateFile << line << std::endl;  // Write line to template file
   }
   templateFile.close();
+  if (strcmp(parametersScript.c_str(),"") == 0) {
 
-  if (strcmp(parametersScript.c_str(),"") == 0) {
     // workflowDriverFile << moveCommand << mainScript << " tmpSimCenter.script \n";
     workflowDriverFile << dpreproCommand << " params.in " << mainScriptTemplate << " " << mainScript << "\n";
   } else {
@@ -268,7 +290,39 @@ int main(int argc, const char **argv) {
   //
   // done
   //
+  std::cerr << "The run was successful" << std::endl;
   exit(0);
 }
 
+void signalHandler(int signal) {
+  std::cerr << "Caught signal: " << signal << std::endl;
+  exit(signal);
+}
+
+void setupSignalHandlers() {
+  std::signal(SIGSEGV, signalHandler);
+}
+
+void redirectCerrToFile() {
+  if (freopen("FEMpreprocessor.err", "w", stderr) == nullptr) {
+    std::cerr << "Failed to redirect stderr to FEMpreprocessor.err" << std::endl;
+    exit(EXIT_FAILURE);
+  }
+}
+
+
+int main(int argc, const char **argv) {
+  // redirect stderr to the error file and install the signal handlers
+  redirectCerrToFile();
+  setupSignalHandlers();
+
+  try {
+    createDriver(argc, argv); // Pass arguments to the main logic function
+  } catch (const std::exception& e) {
+    std::cerr << "Caught exception in main: " << e.what() << std::endl;
+  }
+
+  return 0;
+
+}
\ No newline at end of file