From 2565a850a3eae8e31c2c76b3c47bdde932836c39 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Tue, 1 Oct 2024 14:20:31 -0700 Subject: [PATCH 01/44] Add configuration strings to host config file through spack, begin creation of test_list.yaml, add py-pyyaml as a new TPL to be able to read in the performance test list --- scripts/CMakeLists.txt | 12 +--- scripts/performance/CMakeLists.txt | 12 ++++ scripts/performance/compare_times.py | 17 +++++ scripts/performance/performance.py.in | 62 ++++++++++++++++--- scripts/performance/test_list.yaml | 23 +++++++ scripts/spack/packages/spheral/package.py | 26 +++++++- src/SimulationControl/SpheralOptionParser.py | 18 ++++-- .../Hydro/Noh/Noh-cylindrical-2d.py | 5 ++ tests/functional/Hydro/Noh/Noh-planar-1d.py | 12 ++-- tests/unit/Utilities/testTimers.py.in | 2 +- 10 files changed, 161 insertions(+), 28 deletions(-) create mode 100644 scripts/performance/CMakeLists.txt create mode 100644 scripts/performance/compare_times.py create mode 100644 scripts/performance/test_list.yaml diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index aac9e6fd1..aea3d522d 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -56,11 +56,6 @@ if (NOT ENABLE_CXXONLY) "${CMAKE_CURRENT_BINARY_DIR}/lcatstest.sh" ) - configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/performance/performance.py.in" - "${CMAKE_CURRENT_BINARY_DIR}/performance/performance.py" - ) - install(FILES "${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh" "${CMAKE_CURRENT_BINARY_DIR}/spheral-env.sh" @@ -70,12 +65,9 @@ if (NOT ENABLE_CXXONLY) DESTINATION "${CMAKE_INSTALL_PREFIX}/scripts" ) - install(FILES - "${CMAKE_CURRENT_BINARY_DIR}/performance/performance.py" - DESTINATION "${CMAKE_INSTALL_PREFIX}/tests" - ) - install(CODE "execute_process( \ COMMAND bash ${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh \ )") + + include_directories(performance) endif() diff --git a/scripts/performance/CMakeLists.txt b/scripts/performance/CMakeLists.txt new file mode 100644 
index 000000000..46e470251 --- /dev/null +++ b/scripts/performance/CMakeLists.txt @@ -0,0 +1,12 @@ + +configure_file( + "${CMAKE_CURRENT_SOURCE_DIR}/performance/performance.py.in" + "${CMAKE_CURRENT_BINARY_DIR}/performance/performance.py" +) + +install(FILES + "${CMAKE_CURRENT_BINARY_DIR}/performance/performance.py" + "${CMAKE_CURRENT_BINARY_DIR}/performance/test_list.json" + "${CMAKE_CURRENT_BINARY_DIR}/performance/compare_times.py" + DESTINATION "${CMAKE_INSTALL_PREFIX}/tests/performance" +) diff --git a/scripts/performance/compare_times.py b/scripts/performance/compare_times.py new file mode 100644 index 000000000..85b3d7d53 --- /dev/null +++ b/scripts/performance/compare_times.py @@ -0,0 +1,17 @@ + +regions = ["ConnectivityMap_computeConnectivity", "CheapRK2"] + +def compare_times(manager): + for test in manager.testlist: + run_dir = t.directory + cfile = os.path.join(run_dir, t.options["cali_file"]) + r = cr.CaliperReader() + r.read(cali_file) + records = r.records + for i in records: + if ("region" in i): + fname = i["region"] + if (type(fname) is list): + fname = fname[-1] + if (fname in regions): + print(f"{ diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in index b337e5042..6c799c1ed 100644 --- a/scripts/performance/performance.py.in +++ b/scripts/performance/performance.py.in @@ -1,18 +1,66 @@ #!/user/bin/env python3 -import sys, os +import sys, os, yaml +import numpy as np + caliper_loc = "@CONFIG_CALIPER_DIR@" sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) +benchmark_dir = "/usr/gapps/Spheral/benchmarks" +test_list_yaml = "@CMAKE_INSTALL_PREFIX@/tests/performance/test_list.yaml" +spheral_sys_arch = "@SPHERAL_SYS_ARCH@" +spheral_install_config = "@SPHERAL_CONFIGURATION@" + import caliperreader as cr -# Put some filler functions here -def compare_times(manager): - filtered = [test for test in manager.testlist if test.status is PASSED] - for t in filtered: - print(t) +from compare_times import 
compare_times onExit(compare_times) glue(keep=True) -source("functional/Hydro/Noh/Noh-cylindrical-2d.py") +def get_test(test_configs, gen_tests): + # Gathers the specific and general test data info + # Returns the input line, timer regions, and timers + gen_test_name = test_configs["gen_test"] + gen_test = gen_tests[gen_test_name] + input_line = gen_test["gen_inputs"] + " " + test_configs["inputs"] + return input_line, gen_test["regions"], gen_test["timers"] + +def create_test(test_dir, test_name, test_inps, test_num, num_cores): + test_path = os.path.join(test_dir, test_name+".py") + cali_file = f"{test_name}_{test_num}.cali" + ref_cali_file = os.path.join(benchmark_dir, spheral_test_config, cali_file) + # All tests should have doCompare input + inps = f"{test_inps} --caliperFilename {cali_file} --doCompare False" + t = test(test_path, inps, + label=f"{test_name} test {test_num}", + np=num_cores, + cali_file=f"{cali_file}", + ref_cali_file=f"{ref_cali_file}") + return t + +with open(test_list_yaml, 'r') as ff: + try: + loader = yaml.safe_load(ff) + except yaml.YAMLError as exception: + print(exception) + gen_tests = loader["gen_tests"] + if (spheral_sys_arch not in loader): + print(f"ERROR: {spheral_sys_arch} architecture not found in test_list.yaml") + sys.exit(1) + cur_arch_tests = loader[spheral_sys_arch] + for test_name, configs in cur_arch_tests.items(): + input_line, regions, timers = get_test(configs, gen_tests) + t = create_test(test_dir, + +noh_dir = "functional/Hydro/Noh" +noh_name = "Noh-cylindrical-2d" +num_cores = 72 +points_per_core = 5000 +total_points = points_per_core * num_cores +nradial = int(np.sqrt(total_points)) +ntheta = nradial +noh_inps = f"--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --steps 10 --doComparison False --nRadial {nradial} --nTheta {ntheta}" +t = create_test(noh_dir, noh_name, noh_inps, 0, num_cores) +# Add a wait to ensure all timer files are done +wait() diff 
--git a/scripts/performance/test_list.yaml b/scripts/performance/test_list.yaml new file mode 100644 index 000000000..6a1458340 --- /dev/null +++ b/scripts/performance/test_list.yaml @@ -0,0 +1,23 @@ +# The performance tests are detailed in this file +# If any tests have general inputs and might be used multiple times, put them here with the title being the name of the file +gen_tests: + Noh-cylindrical-2d: + run_dir: functional/Hydro/Noh + inputs: "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --doComparison False" + regions: ["CheapRK2", "CheapRK2EvalDerivs", + "ConnectivityMap_computeConnectivity"] + timers: ["sum#inclusive#sum#time.duration"] + Noh-planar-1d: + run_dir: functional/Hydro/Noh + gen_inputs: "--graphics False --clearDirectories True --doComparison False --steps 20" + regions: ["CheapRKs", "CheapRK2EvalDerivs", + "ConnectivityMap_computeConnectivity"] + timers: ["sum#inclusive#sum#time.duration"] +# These correspond to the SPHERAL_SYS_ARCH CMake variable +# RZGenie/Ruby +linux-rhel8-broadwell: + NC2d: + gen_test: Noh-cylindrical-2d # Must correspond to entry in gen_tests + inputs: "--nTheta 600 --nRadial 600 --steps 10" + regions: ["CheapRK2PreInit"] + num_cores: 72 diff --git a/scripts/spack/packages/spheral/package.py b/scripts/spack/packages/spheral/package.py index 8695500ea..575d79289 100644 --- a/scripts/spack/packages/spheral/package.py +++ b/scripts/spack/packages/spheral/package.py @@ -1,9 +1,10 @@ -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. 
# # SPDX-License-Identifier: (Apache-2.0 OR MIT) from spack.package import * +import spack import socket import os @@ -83,6 +84,7 @@ class Spheral(CachedCMakePackage, CudaPackage): depends_on('py-docutils@0.18.1', type='build') depends_on('py-scipy@1.12.0', type='build') depends_on('py-ats@exit', type='build') + depends_on('py-pyyaml', type='build') depends_on('py-mpi4py@3.1.5', type='build', when='+mpi') depends_on('py-sphinx', type='build') @@ -101,6 +103,25 @@ def _get_sys_type(self, spec): sys_type = env["SYS_TYPE"] return sys_type + def _get_arch(self): + host_platform = spack.platforms.host() + host_os = host_platform.operating_system("default_os") + host_target = host_platform.target("default_target") + architecture = spack.spec.ArchSpec((str(host_platform), str(host_os), str(host_target))) + spack_arch = str(architecture) + return spack_arch.strip() + + # Create a name for the specific configuration being built + # This name is used to differentiate timings during performance testing + def _get_config_name(self, spec): + arch = self._get_arch() + config_name = f"{arch}_{spec.compiler.name}_{spec.compiler.version}" + if ("+mpi" in spec): + config_name += "_" + spec.format("{^mpi.name}_{^mpi.version}") + if ("+cuda" in spec): + config_name += "_" + spec.format("{^cuda.name}{^cuda.version}") + return config_name.replace(" ", "_") + @property def cache_name(self): @@ -163,6 +184,9 @@ def initconfig_package_entries(self): entries.append(cmake_cache_option('TPL_VERBOSE', False)) entries.append(cmake_cache_option('BUILD_TPL', True)) + entries.append(cmake_cache_string('SPHERAL_SYS_ARCH', self._get_arch())) + entries.append(cmake_cache_string('SPHERAL_CONFIGURATION', self._get_config_name(spec))) + # TPL locations entries.append(cmake_cache_path('caliper_DIR', spec['caliper'].prefix)) diff --git a/src/SimulationControl/SpheralOptionParser.py b/src/SimulationControl/SpheralOptionParser.py index 7f45e26c0..e9c883eb0 100644 --- 
a/src/SimulationControl/SpheralOptionParser.py +++ b/src/SimulationControl/SpheralOptionParser.py @@ -24,9 +24,13 @@ def commandLine(**options): dest = "verbose", default = False, help = "Verbose output -- print all options that were set.") - parser.add_argument("--caliperConfig", default="", type=str) - parser.add_argument("--caliperFilename", default="", type=str) - parser.add_argument("--caliperConfigJSON", default="", type=str) + # This logic checks if the user already set a Caliper argument and default value + # and prevents adding the argument if it already exists + arg_list = [action.dest for action in parser._actions] + cali_args = ["Config", "Filename", "ConfigJSON"] + for ca in cali_args: + if (ca not in arg_list): + parser.add_argument(f"--caliper{ca}", default="", type=str) # Evaluate the command line. args = parser.parse_args() arg_dict = vars(args) @@ -59,8 +63,11 @@ def commandLine(**options): if (type(val) != type(options[key])): val = eval(val, gd) gd[key] = val - # Initialize timers - InitTimers(args.caliperConfig, args.caliperFilename, args.caliperConfigJSON) + # Initialize Caliper ConfigManager + InitTimers(args.caliperConfig, + args.caliperFilename, + args.caliperConfigJSON, + args.caliperOutputDir) return def InitTimers(caliper_config, filename, caliper_json): @@ -69,6 +76,7 @@ def InitTimers(caliper_config, filename, caliper_json): if(not caliper_config): raise RuntimeError("SpheralOptionParser: specifying a configuration file without using one of the configurations means no timers are started") off_tests = ["none", "off", "disable", "disabled", "0"] + # Check if Caliper is turned off if (caliper_config.lower() in off_tests): return elif (caliper_config): diff --git a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py index 855128579..533e3b039 100644 --- a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py +++ b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py @@ -170,6 +170,7 @@ dataDir = 
"dumps-cylindrical-Noh", outputFile = "None", comparisonFile = "None", + doCompare = True, graphics = True, ) @@ -604,6 +605,10 @@ control.updateViz(control.totalSteps, integrator.currentTime, 0.0) control.dropRestartFile() +# If running the performance test, stop here +if not doCompare: + sys.exit(0) + #------------------------------------------------------------------------------- # Plot the results. #------------------------------------------------------------------------------- diff --git a/tests/functional/Hydro/Noh/Noh-planar-1d.py b/tests/functional/Hydro/Noh/Noh-planar-1d.py index 2e9ac0831..8d7259c31 100644 --- a/tests/functional/Hydro/Noh/Noh-planar-1d.py +++ b/tests/functional/Hydro/Noh/Noh-planar-1d.py @@ -184,6 +184,7 @@ comparisonFile = "None", normOutputFile = "None", writeOutputLabel = True, + doComparison = True, # Parameters for the test acceptance., L1rho = 0.0537214, @@ -614,6 +615,9 @@ control.step(5) control.advance(goalTime, maxSteps) +# If running the performance test, stop here +if not doCompare: + sys.exit(0) #------------------------------------------------------------------------------- # Compute the analytic answer. @@ -641,12 +645,12 @@ Aans = [Pi/rhoi**gamma for (Pi, rhoi) in zip(Pans, rhoans)] L1 = 0.0 for i in range(len(rho)): - L1 = L1 + abs(rho[i]-rhoans[i]) + L1 = L1 + abs(rho[i]-rhoans[i]) L1_tot = L1 / len(rho) if mpi.rank == 0 and outputFile != "None": - print("L1=",L1_tot,"\n") - with open("Converge.txt", "a") as myfile: - myfile.write("%s %s\n" % (nx1, L1_tot)) + print("L1=",L1_tot,"\n") + with open("Converge.txt", "a") as myfile: + myfile.write("%s %s\n" % (nx1, L1_tot)) #------------------------------------------------------------------------------- # Plot the final state. 
diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index 53fb21506..afe5f11b4 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py.in @@ -29,7 +29,7 @@ test_dict_0 = {"perf_test": "weak_scaling"} adiak_valueString("perf_test", test_dict_0["perf_test"], adiak_categories.performance) # Caliperreader reads everything as strings for some terrible reason -# So the test have to be hacked up +# So the test has to be hacked up # Correct method: # test_dict_1 = {"rank_count": mpi.procs} From 4fd9292fa0c120652fabf29272e467117bcba670 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 3 Oct 2024 08:47:35 -0700 Subject: [PATCH 02/44] Removed yaml method and TPL, fixed missed variable in spheraloptionparser from previous commit --- scripts/CMakeLists.txt | 3 +- scripts/performance/CMakeLists.txt | 10 +- scripts/performance/compare_times.py | 17 --- scripts/performance/performance.py.in | 120 ++++++++++++------- scripts/performance/test_list.yaml | 23 ---- scripts/spack/packages/spheral/package.py | 1 - src/SimulationControl/SpheralOptionParser.py | 3 +- 7 files changed, 81 insertions(+), 96 deletions(-) delete mode 100644 scripts/performance/compare_times.py delete mode 100644 scripts/performance/test_list.yaml diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index aea3d522d..1081fe206 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -65,9 +65,10 @@ if (NOT ENABLE_CXXONLY) DESTINATION "${CMAKE_INSTALL_PREFIX}/scripts" ) + add_subdirectory(performance) + install(CODE "execute_process( \ COMMAND bash ${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh \ )") - include_directories(performance) endif() diff --git a/scripts/performance/CMakeLists.txt b/scripts/performance/CMakeLists.txt index 46e470251..31baf5573 100644 --- a/scripts/performance/CMakeLists.txt +++ b/scripts/performance/CMakeLists.txt @@ -1,12 +1,10 @@ configure_file( - 
"${CMAKE_CURRENT_SOURCE_DIR}/performance/performance.py.in" - "${CMAKE_CURRENT_BINARY_DIR}/performance/performance.py" + "${CMAKE_CURRENT_SOURCE_DIR}/performance.py.in" + "${CMAKE_CURRENT_BINARY_DIR}/performance.py" ) install(FILES - "${CMAKE_CURRENT_BINARY_DIR}/performance/performance.py" - "${CMAKE_CURRENT_BINARY_DIR}/performance/test_list.json" - "${CMAKE_CURRENT_BINARY_DIR}/performance/compare_times.py" - DESTINATION "${CMAKE_INSTALL_PREFIX}/tests/performance" + "${CMAKE_CURRENT_BINARY_DIR}/performance.py" + DESTINATION "${CMAKE_INSTALL_PREFIX}/tests" ) diff --git a/scripts/performance/compare_times.py b/scripts/performance/compare_times.py deleted file mode 100644 index 85b3d7d53..000000000 --- a/scripts/performance/compare_times.py +++ /dev/null @@ -1,17 +0,0 @@ - -regions = ["ConnectivityMap_computeConnectivity", "CheapRK2"] - -def compare_times(manager): - for test in manager.testlist: - run_dir = t.directory - cfile = os.path.join(run_dir, t.options["cali_file"]) - r = cr.CaliperReader() - r.read(cali_file) - records = r.records - for i in records: - if ("region" in i): - fname = i["region"] - if (type(fname) is list): - fname = fname[-1] - if (fname in regions): - print(f"{ diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in index 6c799c1ed..1aac9996f 100644 --- a/scripts/performance/performance.py.in +++ b/scripts/performance/performance.py.in @@ -1,66 +1,94 @@ #!/user/bin/env python3 -import sys, os, yaml +# This file runs and compares performance tests through the ats system. 
+# Run using: ./spheral-lcats tests/performance.py + +import sys, os, argparse, time import numpy as np caliper_loc = "@CONFIG_CALIPER_DIR@" sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) +import caliperreader as cr +# Location of benchmark data benchmark_dir = "/usr/gapps/Spheral/benchmarks" -test_list_yaml = "@CMAKE_INSTALL_PREFIX@/tests/performance/test_list.yaml" +# Current system architecture from Spack spheral_sys_arch = "@SPHERAL_SYS_ARCH@" +# Current install configuration from Spack spheral_install_config = "@SPHERAL_CONFIGURATION@" -import caliperreader as cr - -from compare_times import compare_times +# Function called on exit to do timing comparisons +def compare_times(manager): + for test in manager.testlist: + run_dir = test.directory + cfile = os.path.join(run_dir, test.options["caliper_filename"]) + ref_caliper_file = test.options["ref_cali_file"] + regions = test.options["regions"] + timers = test.options["timers"] + r = cr.CaliperReader() + r.read(cfile) + records = r.records + # Extract current times + times = {} + for rec in records: + if ("region" in rec): + fname = rec["region"] + if (type(fname) is list): + fname = fname[-1] + if (fname in regions): + if (fname in times): + for t in timers: + times[fname][t] += float(rec[t]) + else: + new_dict = {} + for t in timers: + new_dict.update({t: float(rec[t])}) + times.update({fname: new_dict}) + for i, j in times.items(): + print(f"{i}") + for k, v in j.items(): + print(f"{k}: {v}") onExit(compare_times) glue(keep=True) -def get_test(test_configs, gen_tests): - # Gathers the specific and general test data info - # Returns the input line, timer regions, and timers - gen_test_name = test_configs["gen_test"] - gen_test = gen_tests[gen_test_name] - input_line = gen_test["gen_inputs"] + " " + test_configs["inputs"] - return input_line, gen_test["regions"], gen_test["timers"] +# NOH tests +group(name="NOH tests") +# General input for all Noh-cylindrical-2d.py tests +test_dir = 
"../functional/Hydro/Noh" +test_file = "Noh-cylindrical-2d.py" +gen_noh_inp = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --doCompare False" -def create_test(test_dir, test_name, test_inps, test_num, num_cores): - test_path = os.path.join(test_dir, test_name+".py") - cali_file = f"{test_name}_{test_num}.cali" - ref_cali_file = os.path.join(benchmark_dir, spheral_test_config, cali_file) - # All tests should have doCompare input - inps = f"{test_inps} --caliperFilename {cali_file} --doCompare False" - t = test(test_path, inps, - label=f"{test_name} test {test_num}", - np=num_cores, - cali_file=f"{cali_file}", - ref_cali_file=f"{ref_cali_file}") - return t - -with open(test_list_yaml, 'r') as ff: - try: - loader = yaml.safe_load(ff) - except yaml.YAMLError as exception: - print(exception) - gen_tests = loader["gen_tests"] - if (spheral_sys_arch not in loader): - print(f"ERROR: {spheral_sys_arch} architecture not found in test_list.yaml") - sys.exit(1) - cur_arch_tests = loader[spheral_sys_arch] - for test_name, configs in cur_arch_tests.items(): - input_line, regions, timers = get_test(configs, gen_tests) - t = create_test(test_dir, - -noh_dir = "functional/Hydro/Noh" -noh_name = "Noh-cylindrical-2d" -num_cores = 72 -points_per_core = 5000 -total_points = points_per_core * num_cores +# Test 1 +num_cores = 8 +ppc = 100 # Points per core +# If we are on RZGenie/Ruby +if ("broadwell" in spheral_sys_arch): + num_cores = 72 + ppc = 500 +total_points = num_cores * ppc nradial = int(np.sqrt(total_points)) ntheta = nradial -noh_inps = f"--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --steps 10 --doComparison False --nRadial {nradial} --nTheta {ntheta}" -t = create_test(noh_dir, noh_name, noh_inps, 0, num_cores) +test_name = "NC2D_1" +caliper_filename = f"{test_name}_{int(time.time())}.cali" +inps = f"{gen_noh_inp} --nTheta {ntheta} --nRadial 
{nradial} --steps 10 --caliperFilename {caliper_filename}" +test_path = os.path.join(test_dir, test_file) +# Path to benchmark timing data +ref_cali_file = os.path.join(benchmark_dir, spheral_install_config, caliper_filename) +regions = ["CheapRK2", + "CheapRK2PreInit", + "ConnectivityMap_computeConnectivity", + "ConnectivityMap_patch", + "CheapRK2EvalDerivs", + "CheapRK2EndStep"] +timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks +t = test(script=test_path, clas=inps, label=f"{test_name}", np=num_cores, + caliper_filename=caliper_filename, + regions=regions, + timers=timers, + ref_cali_file=ref_cali_file) + +endgroup() + # Add a wait to ensure all timer files are done wait() diff --git a/scripts/performance/test_list.yaml b/scripts/performance/test_list.yaml deleted file mode 100644 index 6a1458340..000000000 --- a/scripts/performance/test_list.yaml +++ /dev/null @@ -1,23 +0,0 @@ -# The performance tests are detailed in this file -# If any tests have general inputs and might be used multiple times, put them here with the title being the name of the file -gen_tests: - Noh-cylindrical-2d: - run_dir: functional/Hydro/Noh - inputs: "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --doComparison False" - regions: ["CheapRK2", "CheapRK2EvalDerivs", - "ConnectivityMap_computeConnectivity"] - timers: ["sum#inclusive#sum#time.duration"] - Noh-planar-1d: - run_dir: functional/Hydro/Noh - gen_inputs: "--graphics False --clearDirectories True --doComparison False --steps 20" - regions: ["CheapRKs", "CheapRK2EvalDerivs", - "ConnectivityMap_computeConnectivity"] - timers: ["sum#inclusive#sum#time.duration"] -# These correspond to the SPHERAL_SYS_ARCH CMake variable -# RZGenie/Ruby -linux-rhel8-broadwell: - NC2d: - gen_test: Noh-cylindrical-2d # Must correspond to entry in gen_tests - inputs: "--nTheta 600 --nRadial 600 --steps 10" - regions: ["CheapRK2PreInit"] - num_cores: 72 
diff --git a/scripts/spack/packages/spheral/package.py b/scripts/spack/packages/spheral/package.py index 575d79289..000fb8fc4 100644 --- a/scripts/spack/packages/spheral/package.py +++ b/scripts/spack/packages/spheral/package.py @@ -84,7 +84,6 @@ class Spheral(CachedCMakePackage, CudaPackage): depends_on('py-docutils@0.18.1', type='build') depends_on('py-scipy@1.12.0', type='build') depends_on('py-ats@exit', type='build') - depends_on('py-pyyaml', type='build') depends_on('py-mpi4py@3.1.5', type='build', when='+mpi') depends_on('py-sphinx', type='build') diff --git a/src/SimulationControl/SpheralOptionParser.py b/src/SimulationControl/SpheralOptionParser.py index e9c883eb0..7870dc838 100644 --- a/src/SimulationControl/SpheralOptionParser.py +++ b/src/SimulationControl/SpheralOptionParser.py @@ -66,8 +66,7 @@ def commandLine(**options): # Initialize Caliper ConfigManager InitTimers(args.caliperConfig, args.caliperFilename, - args.caliperConfigJSON, - args.caliperOutputDir) + args.caliperConfigJSON) return def InitTimers(caliper_config, filename, caliper_json): From 6e15a5ac036faf19c47c186680e7077b90bacb45 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 3 Oct 2024 13:46:21 -0700 Subject: [PATCH 03/44] Made adiak python wrapping more general, automatically add commandLine inputs to adiak, updated testTimer to reflect these changes --- src/PYB11/Utilities/Utilities_PYB11.py | 4 +- src/SimulationControl/SpheralOptionParser.py | 5 ++- tests/unit/Utilities/testTimers.py.in | 47 +++++++++++++------- 3 files changed, 36 insertions(+), 20 deletions(-) diff --git a/src/PYB11/Utilities/Utilities_PYB11.py b/src/PYB11/Utilities/Utilities_PYB11.py index feba85364..6dc6cfc6a 100644 --- a/src/PYB11/Utilities/Utilities_PYB11.py +++ b/src/PYB11/Utilities/Utilities_PYB11.py @@ -804,6 +804,6 @@ def clippedVolume(poly = "const Dim<3>::FacetedVolume&", ("double", "Scalar"), ("std::string", "String")): exec(""" -adiak_value%(label)s = PYB11TemplateFunction(adiak_value, "%(value)s") 
-adiak_value2%(label)s = PYB11TemplateFunction(adiak_value2, "%(value)s", pyname="adiak_value%(label)s") +adiak_value%(label)s = PYB11TemplateFunction(adiak_value, "%(value)s", pyname="adiak_value") +adiak_value2%(label)s = PYB11TemplateFunction(adiak_value2, "%(value)s", pyname="adiak_value") """ % {"label" : label, "value" : value}) diff --git a/src/SimulationControl/SpheralOptionParser.py b/src/SimulationControl/SpheralOptionParser.py index 7870dc838..a304098bd 100644 --- a/src/SimulationControl/SpheralOptionParser.py +++ b/src/SimulationControl/SpheralOptionParser.py @@ -63,6 +63,7 @@ def commandLine(**options): if (type(val) != type(options[key])): val = eval(val, gd) gd[key] = val + adiak_value(key, val) # Initialize Caliper ConfigManager InitTimers(args.caliperConfig, args.caliperFilename, @@ -93,6 +94,6 @@ def InitTimers(caliper_config, filename, caliper_json): testname = os.path.splitext(os.path.basename(sys.argv[0]))[0] testname += unique_digits + ".cali" TimerMgr.default_start(testname) - adiak_valueInt("threads_per_rank", omp_get_num_threads()) - adiak_valueInt("num_ranks", mpi.procs) + adiak_value("threads_per_rank", omp_get_num_threads()) + adiak_value("num_ranks", mpi.procs) return diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index afe5f11b4..f717959d2 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py.in @@ -14,7 +14,25 @@ import mpi import sys, os, time -commandLine() +# Dictionary for testing Adiak inputs +test_dict = {"perf_test": "weak_scaling", + "rank_count": str(mpi.procs)} +# Note: Caliperreader reads everything as strings for some terrible reason +# The second entry should be +# "rank_count": mpi.procs} +# but is hacked up for the tests to pass +# Testing commmand line inputs that are automatically passed to adiak +test_int = 4 +test_str = "hello" +test_float = 4.224 + +inp_test_dict = {"test_int": test_int, + "test_str": test_str, + "test_float": 
test_float} + +commandLine(test_int=test_int, + test_str=test_str, + test_float=test_float) # Remove cali files from previous test runs caliper_file = TimerMgr.get_filename() @@ -25,21 +43,11 @@ if (os.path.exists(caliper_file)): do_timers = False if (TimerMgr.is_started()): do_timers = True -test_dict_0 = {"perf_test": "weak_scaling"} -adiak_valueString("perf_test", test_dict_0["perf_test"], - adiak_categories.performance) -# Caliperreader reads everything as strings for some terrible reason -# So the test has to be hacked up -# Correct method: -# test_dict_1 = {"rank_count": mpi.procs} -# adiak_valueInt("rank_count", test_dict_1["rank_count"]) -# Hacked method to have tests pass with caliperreader: -test_dict_1 = {"rank_count": str(mpi.procs)} -adiak_valueString("rank_count", test_dict_1["rank_count"]) +for key, val in test_dict.items(): + adiak_value(key, val) -test_dicts = [test_dict_0, test_dict_1] run_count = 8 sleep_time = 1.E-4 fake_timer_name = "test_timer" @@ -72,12 +80,19 @@ if (do_timers and TimerMgr.get_filename()): print("Run count in Caliper file is correct") else: found_errors += 1 - # Test for adiak values - for td in test_dicts: - if (td.items() <= r.globals.items()): + glbs = r.globals + # Test Adiak output for explicitly set values + for td in test_dict: + if (td.items() <= glbs.items()): print(f"Found {td.items()}") else: found_errors += 1 + # Test Adiak outputs for commandLine inputs + for tkey, tval in inp_test_dict.items(): + if (tkey in glbs.items() and str(tval) == glbs[tkey]): + print(f"Found {tkey}: {tval}") + else: + found_errors += 1 if (found_errors > 0): raise ValueError("Caliper file not correct") else: From 4b581fdebcb787da5acd0226da2e4229c446f100 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 3 Oct 2024 14:25:31 -0700 Subject: [PATCH 04/44] Fix testTimers --- tests/unit/Utilities/testTimers.py.in | 60 +++++++++++++++++---------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git 
a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index f717959d2..09e698a3f 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py.in @@ -14,18 +14,18 @@ import mpi import sys, os, time -# Dictionary for testing Adiak inputs +# For testing explicitly set Adiak inputs test_dict = {"perf_test": "weak_scaling", - "rank_count": str(mpi.procs)} -# Note: Caliperreader reads everything as strings for some terrible reason -# The second entry should be -# "rank_count": mpi.procs} -# but is hacked up for the tests to pass -# Testing commmand line inputs that are automatically passed to adiak + "rank_count": mpi.procs, + "fake_float": 2.141} +# Add test dicts to Adiak +for key, val in test_dict.items(): + adiak_value(key, val) + +# For testing that commmandLine inputs are automatically passed to adiak test_int = 4 test_str = "hello" test_float = 4.224 - inp_test_dict = {"test_int": test_int, "test_str": test_str, "test_float": test_float} @@ -44,10 +44,6 @@ do_timers = False if (TimerMgr.is_started()): do_timers = True - -for key, val in test_dict.items(): - adiak_value(key, val) - run_count = 8 sleep_time = 1.E-4 fake_timer_name = "test_timer" @@ -56,6 +52,8 @@ for i in range(run_count): TimerMgr.timer_start(fake_timer_name) time.sleep(sleep_time) TimerMgr.timer_end(fake_timer_name) + +# Read in Caliper file and process it if (do_timers and TimerMgr.get_filename()): adiak_fini() TimerMgr.fini() @@ -69,30 +67,46 @@ if (do_timers and TimerMgr.get_filename()): r.read(caliper_file) records = r.records found_errors = 0 + # Test for timer name if (fake_timer_name in records[1]['region']): print(f"Found {fake_timer_name} timer") else: + print(f"ERROR: {fake_timer_name} timer not found") found_errors += 1 + # Test for function count count_val = int(eval(records[1]["avg#sum#rc.count"])) if (count_val == run_count): print("Run count in Caliper file is correct") else: + print("ERROR: Caliper function count is off") 
found_errors += 1 - glbs = r.globals + + # Note: Caliperreader reads everything as strings for some terrible reason + # we must convert the Adiak values first + adiak_inp = {} + for key, val in r.globals.items(): + try: + newval = eval(val) + except: + newval = val + adiak_inp.update({key: newval}) + # Test Adiak output for explicitly set values - for td in test_dict: - if (td.items() <= glbs.items()): - print(f"Found {td.items()}") - else: - found_errors += 1 + if (test_dict.items() <= adiak_inp.items()): + print(f"Found {test_dict.items()}") + else: + print("ERROR: Adiak values not found in Caliper file") + found_errors += 1 + # Test Adiak outputs for commandLine inputs - for tkey, tval in inp_test_dict.items(): - if (tkey in glbs.items() and str(tval) == glbs[tkey]): - print(f"Found {tkey}: {tval}") - else: - found_errors += 1 + if (inp_test_dict.items() <= adiak_inp.items()): + print(f"Found {inp_test_dict.items()}") + else: + print("ERROR: commandLine inputs not found in Adiak values in Caliper file") + found_errors += 1 + if (found_errors > 0): raise ValueError("Caliper file not correct") else: From 1248ea6d7b76511fff0886d77741c0dc90c507b7 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 7 Oct 2024 16:46:59 -0700 Subject: [PATCH 05/44] Moved Caliper and Adiak parsing and init routines to a new SpheralTimingParser.py file instead of SpheralOptionParser.py, added ability to specify Adiak data directly from command line as a dictionary, update testTimer with adiakData tests and improved layout --- cmake/SetupSpheral.cmake | 1 + docs/developer/dev/diagnostic_tools.rst | 83 +++++++++------ scripts/performance/CMakeLists.txt | 2 +- scripts/performance/performance.py.in | 2 +- src/SimulationControl/CMakeLists.txt | 1 + src/SimulationControl/SpheralOptionParser.py | 60 ++--------- src/SimulationControl/SpheralTimingParser.py | 103 +++++++++++++++++++ tests/unit/Utilities/testTimers.py.in | 57 ++++------ 8 files changed, 183 insertions(+), 126 deletions(-) create 
mode 100644 src/SimulationControl/SpheralTimingParser.py diff --git a/cmake/SetupSpheral.cmake b/cmake/SetupSpheral.cmake index d93c4fd41..c0c7e756a 100644 --- a/cmake/SetupSpheral.cmake +++ b/cmake/SetupSpheral.cmake @@ -183,6 +183,7 @@ if (ENABLE_TESTS) string(REPLACE "\n" " " test_files ${test_files1}) separate_arguments(test_files) list(REMOVE_ITEM test_files tests/unit/CXXTests/runCXXTests.ats) + list(REMOVE_ITEM test_files tests/unit/Utilities/testTimers.py.in) install_with_directory( FILES ${test_files} SOURCE ${SPHERAL_ROOT_DIR} diff --git a/docs/developer/dev/diagnostic_tools.rst b/docs/developer/dev/diagnostic_tools.rst index f79cc14b2..a607db329 100644 --- a/docs/developer/dev/diagnostic_tools.rst +++ b/docs/developer/dev/diagnostic_tools.rst @@ -7,62 +7,64 @@ Spheral uses Caliper to preform code diagnostics, such as timing. To enable this ./scripts/devtools/host-config-build.py -.cmake -DENABLE_TIMER=ON -Querying using Caliper -====================== +Using Caliper +============= Caliper is configured and started through the ``cali::ConfigManager``. The ``cali::ConfigManager`` is wrapped in a ``TimerMgr`` singleton class, which has a python interface. .. note:: - ``TimerMgr`` is initialized and started during ``commandLine()`` in ``src/SimulationControl/SpheralOptionParser.py``. This is because ``commandLine()`` is almost always invoked directly near the start of a problem. However, if ``commandLine()`` is not called, the timers would need to be configured and started directly using the ``TimerMgr`` class. See :ref:`below ` for more details. + ``TimerMgr`` is initialized in ``src/SimulationControl/SpheralTimingParser.py`` which is called during ``commandLine()`` in ``src/SimulationControl/SpheralOptionParser.py``. This is because ``commandLine()`` is almost always invoked directly near the start of a problem. However, if ``commandLine()`` is not called, the timer manager would need to be configured and started directly using the ``TimerMgr`` class. 
See :ref:`below ` for more details. By default, the Caliper configuration is set to ``spot`` and outputs Caliper files (``.cali``). -For the default configuration, the Caliper files are named based on what file is being run, for example: -:: - python Noh-cylindrical-2d.py +There are many different Caliper configurations to view various information. Here are some extra links for those who want to read or experiment with other features in Caliper that can be incorporated into Spheral: -will produce a timing file called ``Noh-cylindrical-2d_YEAR_MONTH_DATE_TIME.cali`` where the file name includes the current date and time. + * `Configuration basics `_ + * `Builtin Configuration `_ + * `Manual Configuration `_ + * `Output Format `_ -The Caliper file name can be specified using the command line -:: +Caliper and Adiak Options +------------------------- - python Noh-cylindrical-2d.py --caliperFilename 'new_test_name.cali' +.. option:: --caliperFilename -Different Caliper configurations can be set at the command line using ``--caliperConfig`` like so -:: + Name of Caliper timing file. Should include file extensions. Optional, default: ``name_of_file_YEAR_MONTH_DATE_TIME.cali``. - python Noh-cylindrical-2d.py --caliperConfig 'runtime-report(output=time.txt),calc.inclusive,region.count' +.. option:: --caliperConfig CONFIG_STR -.. note:: - The above configuration produces timing results similar to the previous ``Spheral::Timer`` method. This results in a file named ``time.txt`` with cumulative times for the nested regions as well as a count of how many times each region ran. + Specify a built-in Caliper configuration or turn off timers with ``none``. Optional, default: ``spot``. -Similarly, a non-default Caliper configuration can be read in from a JSON file using ``--caliperConfigJSON`` and providing the file name. -Lastly, Caliper timers can be turned off using ``--caliperConfig none``. 
+ **Example**: + :: -There are many different Caliper configurations to view various information. Here are some extra links for those who want to read or experiment with other features in Caliper that can be incorporated into Spheral: + ./spheral ex_prog.py --caliperConfig 'runtime-report(output=time.txt),calc.inclusive,region.count' - * `Configuration basics `_ - * `Builtin Configuration `_ - * `Manual Configuration `_ - * `Output Format `_ +.. note:: + The configuration in the example above produces timing results similar to the previous ``Spheral::Timer`` method. This results in a file named ``time.txt`` with cumulative times for the nested regions as well as a count of how many times each region ran. +.. option:: --caliperConfigJSON JSON_FILE -Adding Region Timers in C++ -=========================== + Specify a JSON file containing a non-default Caliper configuration. Optional. -So far there are two different types of regions in Spheral, using the following macros: -:: +.. option:: --adiakData ADIAK_DATA_STR - TIME_FUNCTION + Specify any Adiak data directly in the command line. Must be a string in key:value format, separated by commas. Optional. -or + **Example**: + :: -:: + ./spheral ex_prog.py --adiakData "test_name: the_cheat, test_num:10" + +.. note:: + By default, all ``commandLine()`` inputs are added as Adiak metadata. ``--adiakData`` are for metadata that does not come through Spheral command line arguments. Adiak metadata can also be added through the python interface. See :ref:`below ` for more details. - TIME_BEGIN("timer_name") - TIME_END("timer_name") +Adding Region Timers in C++ +--------------------------- + +The following macros are used to create timing regions in the Spheral C++ interface: - ``TIME_FUNCTION`` can be added to the very beginning of a function and creates a region for the entire function using the function's name. 
``TIME_FUNCTION`` uses just the function name and no class or parameter information, so be careful when using this method with functions that could share names. @@ -70,26 +72,39 @@ or Adding Region Timers in Python -============================== +------------------------------ Region timers can be added inside the python code using the following function calls: :: + from SpheralUtilities import TimerMgr TimerMgr.timer_start("timer_name") some_function_call() TimerMgr.timer_end("timer_name") .. note:: - IMPORTANT: All timers must have both a start and end call. Otherwise, memory issues will occur. + All timers must have both a start and end call. Otherwise, memory issues will occur. + +.. _python_adiak: + +Adding Adiak Metadata in Python +------------------------------- + +Adiak metadata can be added inside python code using the following function calls: + +.. code-block:: python + + adiak_values("value_name", value) .. _manual_caliper: Starting Caliper Manually ======================== -As mentioned above, Caliper (not an individual Caliper timer) is normally configured and started in ``commandLine()`` python routine. However, Caliper can be directly configured and started through the python interface, if desired. This can be done by putting the following into the python file: +As mentioned above, the Caliper timing manager is normally configured and started in the ``commandLine()`` routine. However, Caliper can be directly configured and started through the python interface, if desired. 
This can be done by putting the following into the python file: :: + from SpheralUtilities import TimerMgr caliper_config = "some_configuration(output=some_filename.txt)" TimerMgr.add(caliper_config) TimerMgr.start() diff --git a/scripts/performance/CMakeLists.txt b/scripts/performance/CMakeLists.txt index 31baf5573..4d3dd3e76 100644 --- a/scripts/performance/CMakeLists.txt +++ b/scripts/performance/CMakeLists.txt @@ -6,5 +6,5 @@ configure_file( install(FILES "${CMAKE_CURRENT_BINARY_DIR}/performance.py" - DESTINATION "${CMAKE_INSTALL_PREFIX}/tests" + DESTINATION "${SPHERAL_ROOT_DIR}/tests" ) diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in index 1aac9996f..b1003b781 100644 --- a/scripts/performance/performance.py.in +++ b/scripts/performance/performance.py.in @@ -65,7 +65,7 @@ ppc = 100 # Points per core # If we are on RZGenie/Ruby if ("broadwell" in spheral_sys_arch): num_cores = 72 - ppc = 500 + ppc = 1000 total_points = num_cores * ppc nradial = int(np.sqrt(total_points)) ntheta = nradial diff --git a/src/SimulationControl/CMakeLists.txt b/src/SimulationControl/CMakeLists.txt index 4310e577a..32f7ca438 100644 --- a/src/SimulationControl/CMakeLists.txt +++ b/src/SimulationControl/CMakeLists.txt @@ -60,6 +60,7 @@ spheral_install_python_files( SpheralPolytopeSiloDump.py Spheral1dVizDump.py SpheralMatplotlib.py + SpheralTimingParser.py findLastRestart.py Pnorm.py filearraycmp.py diff --git a/src/SimulationControl/SpheralOptionParser.py b/src/SimulationControl/SpheralOptionParser.py index a304098bd..b3efebc2c 100644 --- a/src/SimulationControl/SpheralOptionParser.py +++ b/src/SimulationControl/SpheralOptionParser.py @@ -7,7 +7,7 @@ from SpheralCompiledPackages import * from SpheralTestUtilities import globalFrame -from SpheralUtilities import TimerMgr +import SpheralTimingParser def commandLine(**options): @@ -24,23 +24,14 @@ def commandLine(**options): dest = "verbose", default = False, help = "Verbose output -- print all 
options that were set.") - # This logic checks if the user already set a Caliper argument and default value - # and prevents adding the argument if it already exists - arg_list = [action.dest for action in parser._actions] - cali_args = ["Config", "Filename", "ConfigJSON"] - for ca in cali_args: - if (ca not in arg_list): - parser.add_argument(f"--caliper{ca}", default="", type=str) + + # Parse Caliper and Adiak inputs + SpheralTimingParser.add_timing_args(parser) + # Evaluate the command line. args = parser.parse_args() arg_dict = vars(args) - if (not TimerMgr.timers_usable()): - if (args.caliperConfig or args.caliperFilename or args.caliperConfigJSON): - print("WARNING: Caliper command line inputs provided for "+\ - "non-timer install. Reconfigure the install with "+\ - "-DENABLE_TIMER=ON to be able to use Caliper timers.") - # Verbose output? if args.verbose: print("All parameters set:") @@ -50,12 +41,6 @@ def commandLine(**options): print(" * ", key, " = ", val) else: print(" ", key, " = ", val) - if (args.caliperConfig): - print(" * caliperConfig = ", args.caliperConfig) - if (args.caliperFilename): - print(" * caliperFilename = ", args.caliperFilename) - if (args.caliperConfigJSON): - print(" * caliperConfigJSON = ", args.caliperConfigJSON) # Set all the variables. 
gd = globalFrame().f_globals for key, val in arg_dict.items(): @@ -63,37 +48,6 @@ def commandLine(**options): if (type(val) != type(options[key])): val = eval(val, gd) gd[key] = val - adiak_value(key, val) - # Initialize Caliper ConfigManager - InitTimers(args.caliperConfig, - args.caliperFilename, - args.caliperConfigJSON) - return - -def InitTimers(caliper_config, filename, caliper_json): - if(caliper_json): - TimerMgr.load(caliper_json) - if(not caliper_config): - raise RuntimeError("SpheralOptionParser: specifying a configuration file without using one of the configurations means no timers are started") - off_tests = ["none", "off", "disable", "disabled", "0"] - # Check if Caliper is turned off - if (caliper_config.lower() in off_tests): - return - elif (caliper_config): - TimerMgr.add(caliper_config) - TimerMgr.start() - else: - import os, sys - if (filename): - testname = filename - else: - from datetime import datetime - # Append the current day and time to the filename - unique_digits = datetime.now().strftime("_%Y_%m_%d_%H%M%S_%f") - # Name file based on name of python file being run - testname = os.path.splitext(os.path.basename(sys.argv[0]))[0] - testname += unique_digits + ".cali" - TimerMgr.default_start(testname) - adiak_value("threads_per_rank", omp_get_num_threads()) - adiak_value("num_ranks", mpi.procs) + # Initialize timers and add inputs as Adiak metadata + SpheralTimingParser.init_timer(args) return diff --git a/src/SimulationControl/SpheralTimingParser.py b/src/SimulationControl/SpheralTimingParser.py new file mode 100644 index 000000000..96e6e8b06 --- /dev/null +++ b/src/SimulationControl/SpheralTimingParser.py @@ -0,0 +1,103 @@ +#------------------------------------------------------------------------------- +# Functions for adding Caliper and Adiak parsing arguments and initializing +# the timer manager +#------------------------------------------------------------------------------- + +import argparse, mpi +from SpheralUtilities import 
TimerMgr +from SpheralUtilities import adiak_value +import SpheralOpenMP + +def parse_dict(string): + try: + inp_dict = dict(item.split(":") for item in string.split(",")) + except: + raise SyntaxError("Input to --adiakData must be in key:value format, separated by commas") + new_dict = {} + for ikey, ival in inp_dict.items(): + try: + key = eval(ikey) + except: + key = ikey.strip() + try: + val = eval(ival) + except: + val = ival.strip() + new_dict.update({key: val}) + return new_dict + +def add_timing_args(parser): + """ + Add Caliper and Adiak arguments to the parser + """ + # Allow Adiak values to be set on the command line + # Inputs are a string that can be evaluated into a dictionary + # For example, --adiakData "testname: ShockTube1, testing:3" + parser.add_argument("--adiakData", default=None, + type=parse_dict) + # This logic checks if the user already set a Caliper + # argument and default value and prevents adding the argument + # if it already exists + arg_list = [action.dest for action in parser._actions] + cali_args = ["Config", "Filename", "ConfigJSON"] + for ca in cali_args: + if (ca not in arg_list): + parser.add_argument(f"--caliper{ca}", default="", type=str) + +def init_timer(args): + """ + Initializes the timing manager and adds input values to Adiak + Returns the equivalent dictionary with unnecessary inputs removed + """ + if args.verbose: + if (args.caliperConfig): + print(" * caliperConfig = ", args.caliperConfig) + if (args.caliperFilename): + print(" * caliperFilename = ", args.caliperFilename) + if (args.caliperConfigJSON): + print(" * caliperConfigJSON = ", args.caliperConfigJSON) + if (not TimerMgr.timers_usable()): + if (args.caliperConfig or args.caliperFilename or args.caliperConfigJSON): + print("WARNING: Caliper command line inputs provided for "+\ + "non-timer install. 
Reconfigure the install with "+\ + "-DENABLE_TIMER=ON to be able to use Caliper timers.") + if(args.caliperConfigJSON): + TimerMgr.load(args.caliperConfigJSON) + if(not args.caliperConfig): + raise RuntimeError("SpheralOptionParser: specifying a configuration file without "+\ + "using one of the configurations means no timers are started") + off_tests = ["none", "off", "disable", "disabled", "0"] + # Check if Caliper is turned off + if (args.caliperConfig): + if (args.caliperConfig.lower() in off_tests): + return + TimerMgr.add(args.caliperConfig) + TimerMgr.start() + else: + import os, sys + # If output name for Caliper is given, use it + if (args.caliperFilename): + testname = args.caliperFilename + else: + from datetime import datetime + # Append the current day and time to the filename + unique_digits = datetime.now().strftime("_%Y_%m_%d_%H%M%S_%f") + # Name file based on name of python file being run + testname = os.path.splitext(os.path.basename(sys.argv[0]))[0] + testname += unique_digits + ".cali" + TimerMgr.default_start(testname) + # Add number of ranks and threads per rank + adiak_value("threads_per_rank", SpheralOpenMP.omp_get_num_threads()) + adiak_value("num_ranks", mpi.procs) + + # Add --adiakData inputs as Adiak metadata + if (args.adiakData): + for key, val in args.adiakData.items(): + adiak_value(key, val) + # Add all commandLine() inputs as Adiak metadata + args_dict = vars(args) + args_dict.pop("adiakData") + for key, val in args_dict.items(): + if val: + adiak_value(key, val) + return diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index 09e698a3f..d6b62992a 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py.in @@ -1,28 +1,29 @@ # # #ATS:test(SELF, "--caliperFilename 'timer_test_1.cali'", label="Timer test 1", np=8) -#ATS:test(SELF, "--caliperConfig 'None'", label="Timer test 2", np=8) -#ATS:test(SELF, "--caliperFilename 'timer_test_3.cali'", label="Timer test 
3", np=1) +#ATS:test(SELF, "--caliperConfig 'none'", label="Timer test 2", np=8) +#ATS:test(SELF, "--caliperFilename 'timer_test_3.cali' --adiakData 'adiak_test:1, test_adiak:two'", label="Timer test 3", np=1) # import Spheral from SpheralTestUtilities import * from SpheralOptionParser import * -from SpheralUtilities import TimerMgr from SpheralUtilities import * import mpi import sys, os, time -# For testing explicitly set Adiak inputs +# Test set Adiak inputs test_dict = {"perf_test": "weak_scaling", "rank_count": mpi.procs, "fake_float": 2.141} -# Add test dicts to Adiak for key, val in test_dict.items(): adiak_value(key, val) -# For testing that commmandLine inputs are automatically passed to adiak +# Test the --adiakData input. This must match what is hard-coded in the ATS lines +adiak_data_dict = {"adiak_test": 1, "test_adiak": "two"} + +# Test that commmandLine inputs are being passed to Adiak test_int = 4 test_str = "hello" test_float = 4.224 @@ -60,30 +61,21 @@ if (do_timers and TimerMgr.get_filename()): mpi.barrier() caliper_loc = "@CONFIG_CALIPER_DIR@" sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) - import caliperreader as cr if (not os.path.exists(caliper_file)): - raise ValueError("Caliper file not found") + raise FileNotFoundError("Caliper file not found") + import caliperreader as cr r = cr.CaliperReader() r.read(caliper_file) records = r.records - found_errors = 0 # Test for timer name - if (fake_timer_name in records[1]['region']): - print(f"Found {fake_timer_name} timer") - else: - print(f"ERROR: {fake_timer_name} timer not found") - found_errors += 1 + assert fake_timer_name in records[1]['region'], f"{fake_timer_name} timer not found" # Test for function count count_val = int(eval(records[1]["avg#sum#rc.count"])) - if (count_val == run_count): - print("Run count in Caliper file is correct") - else: - print("ERROR: Caliper function count is off") - found_errors += 1 + assert count_val == run_count, "Caliper function count is off" - 
# Note: Caliperreader reads everything as strings for some terrible reason + # Note: CaliperReader reads everything as strings for some terrible reason # we must convert the Adiak values first adiak_inp = {} for key, val in r.globals.items(): @@ -94,20 +86,11 @@ if (do_timers and TimerMgr.get_filename()): adiak_inp.update({key: newval}) # Test Adiak output for explicitly set values - if (test_dict.items() <= adiak_inp.items()): - print(f"Found {test_dict.items()}") - else: - print("ERROR: Adiak values not found in Caliper file") - found_errors += 1 - - # Test Adiak outputs for commandLine inputs - if (inp_test_dict.items() <= adiak_inp.items()): - print(f"Found {inp_test_dict.items()}") - else: - print("ERROR: commandLine inputs not found in Adiak values in Caliper file") - found_errors += 1 - - if (found_errors > 0): - raise ValueError("Caliper file not correct") - else: - print("No errors found for TimerMgr") + assert test_dict.items() <= adiak_inp.items(), "Adiak values not found in Caliper file" + + # Test Adiak outputs for commandLine() inputs + assert inp_test_dict.items() <= adiak_inp.items(), "commandLine() inputs not found in Caliper file Adiak values" + + # Test --adiakData command line input + if ("adiakData" in adiak_inp): + assert adiak_data_dict.items() <= adiak_inp.items(), "adiakData input found found in Caliper file Adiak values" From d4c1144da2257e2ab58d115fe8b17084a08ec4b4 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 7 Oct 2024 16:55:33 -0700 Subject: [PATCH 06/44] Slight change to testTimers --- tests/unit/Utilities/testTimers.py.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index d6b62992a..1c2c96d55 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py.in @@ -2,7 +2,7 @@ # #ATS:test(SELF, "--caliperFilename 'timer_test_1.cali'", label="Timer test 1", np=8) #ATS:test(SELF, "--caliperConfig 'none'", 
label="Timer test 2", np=8) -#ATS:test(SELF, "--caliperFilename 'timer_test_3.cali' --adiakData 'adiak_test:1, test_adiak:two'", label="Timer test 3", np=1) +#ATS:test(SELF, "--caliperFilename 'timer_test_3.cali' --adiakData 'adiak_test: 1, test_adiak: two'", label="Timer test 3", np=1) # import Spheral From b01e5ef0cc754652fb30d6c004649447f33c9525 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Tue, 8 Oct 2024 13:56:40 -0700 Subject: [PATCH 07/44] Fix issues with non-typical types being given to adiak --- scripts/performance/CMakeLists.txt | 2 +- scripts/performance/performance.py.in | 5 +++-- src/SimulationControl/SpheralTimingParser.py | 16 +++++++++++----- tests/unit/Utilities/testTimers.py.in | 9 ++++++--- 4 files changed, 21 insertions(+), 11 deletions(-) diff --git a/scripts/performance/CMakeLists.txt b/scripts/performance/CMakeLists.txt index 4d3dd3e76..fe3fffa7f 100644 --- a/scripts/performance/CMakeLists.txt +++ b/scripts/performance/CMakeLists.txt @@ -6,5 +6,5 @@ configure_file( install(FILES "${CMAKE_CURRENT_BINARY_DIR}/performance.py" - DESTINATION "${SPHERAL_ROOT_DIR}/tests" + DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" ) diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in index b1003b781..ab9c7ce64 100644 --- a/scripts/performance/performance.py.in +++ b/scripts/performance/performance.py.in @@ -55,7 +55,7 @@ glue(keep=True) # NOH tests group(name="NOH tests") # General input for all Noh-cylindrical-2d.py tests -test_dir = "../functional/Hydro/Noh" +test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/functional/Hydro/Noh" test_file = "Noh-cylindrical-2d.py" gen_noh_inp = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --doCompare False" @@ -82,7 +82,8 @@ regions = ["CheapRK2", "CheapRK2EvalDerivs", "CheapRK2EndStep"] timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks -t = test(script=test_path, clas=inps, 
label=f"{test_name}", np=num_cores, +spec_inps = f"{inps} --adiakData 'test_name: {test_name}, total_points:{nradial*ntheta}'" +t = test(script=test_path, clas=spec_inps, label=f"{test_name}", np=num_cores, caliper_filename=caliper_filename, regions=regions, timers=timers, diff --git a/src/SimulationControl/SpheralTimingParser.py b/src/SimulationControl/SpheralTimingParser.py index 96e6e8b06..3279da857 100644 --- a/src/SimulationControl/SpheralTimingParser.py +++ b/src/SimulationControl/SpheralTimingParser.py @@ -9,6 +9,9 @@ import SpheralOpenMP def parse_dict(string): + """ + Function to parse a dictionary provided through the command line + """ try: inp_dict = dict(item.split(":") for item in string.split(",")) except: @@ -46,8 +49,7 @@ def add_timing_args(parser): def init_timer(args): """ - Initializes the timing manager and adds input values to Adiak - Returns the equivalent dictionary with unnecessary inputs removed + Initializes the timing manager and adds input values to Adiak from parsed arguments """ if args.verbose: if (args.caliperConfig): @@ -94,10 +96,14 @@ def init_timer(args): if (args.adiakData): for key, val in args.adiakData.items(): adiak_value(key, val) + # Add all commandLine() inputs as Adiak metadata args_dict = vars(args) - args_dict.pop("adiakData") + args_dict.pop("adiakData") # Remove --adiakData inputs for key, val in args_dict.items(): - if val: - adiak_value(key, val) + if (type(val) is not type(None)): + try: + adiak_value(key, val) + except: + adiak_value(key, val.__name__) return diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index 1c2c96d55..8d3ef1002 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py.in @@ -86,11 +86,14 @@ if (do_timers and TimerMgr.get_filename()): adiak_inp.update({key: newval}) # Test Adiak output for explicitly set values - assert test_dict.items() <= adiak_inp.items(), "Adiak values not found in Caliper file" + assert 
test_dict.items() <= adiak_inp.items(),\ + "incorrect Adiak values found in Caliper file" # Test Adiak outputs for commandLine() inputs - assert inp_test_dict.items() <= adiak_inp.items(), "commandLine() inputs not found in Caliper file Adiak values" + assert inp_test_dict.items() <= adiak_inp.items(),\ + "incorrect commandLine() inputs found in Caliper file Adiak values" # Test --adiakData command line input if ("adiakData" in adiak_inp): - assert adiak_data_dict.items() <= adiak_inp.items(), "adiakData input found found in Caliper file Adiak values" + assert adiak_data_dict.items() <= adiak_inp.items(),\ + "incorrect adiakData inputs found in Caliper file Adiak values" From 482c5f92303f7b43ab0d24bfe46a584fd2de7770 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Tue, 8 Oct 2024 14:11:06 -0700 Subject: [PATCH 08/44] Move performance.py to the correct place --- scripts/performance/CMakeLists.txt | 2 +- scripts/performance/performance.py.in | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/scripts/performance/CMakeLists.txt b/scripts/performance/CMakeLists.txt index fe3fffa7f..31baf5573 100644 --- a/scripts/performance/CMakeLists.txt +++ b/scripts/performance/CMakeLists.txt @@ -6,5 +6,5 @@ configure_file( install(FILES "${CMAKE_CURRENT_BINARY_DIR}/performance.py" - DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" + DESTINATION "${CMAKE_INSTALL_PREFIX}/tests" ) diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in index ab9c7ce64..16ad68bb9 100644 --- a/scripts/performance/performance.py.in +++ b/scripts/performance/performance.py.in @@ -55,7 +55,7 @@ glue(keep=True) # NOH tests group(name="NOH tests") # General input for all Noh-cylindrical-2d.py tests -test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/functional/Hydro/Noh" +test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/tests/functional/Hydro/Noh" test_file = "Noh-cylindrical-2d.py" gen_noh_inp = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 
--graphics False --clearDirectories True --doCompare False" @@ -75,12 +75,14 @@ inps = f"{gen_noh_inp} --nTheta {ntheta} --nRadial {nradial} --steps 10 --calipe test_path = os.path.join(test_dir, test_file) # Path to benchmark timing data ref_cali_file = os.path.join(benchmark_dir, spheral_install_config, caliper_filename) +# Select which timing regions to post-process regions = ["CheapRK2", "CheapRK2PreInit", "ConnectivityMap_computeConnectivity", "ConnectivityMap_patch", "CheapRK2EvalDerivs", "CheapRK2EndStep"] +# Select which timers to use to post-process the regions above timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks spec_inps = f"{inps} --adiakData 'test_name: {test_name}, total_points:{nradial*ntheta}'" t = test(script=test_path, clas=spec_inps, label=f"{test_name}", np=num_cores, From b297eae0b686088311ce8ada2ced6c6bca348681 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 9 Oct 2024 16:54:20 -0700 Subject: [PATCH 09/44] Reconfigured lcats, removed cerr output for distributed files, added some new options for Noh tests, slight changes to performance.py --- scripts/CMakeLists.txt | 2 +- scripts/gitlab/run_ats.py | 1 + scripts/lc/lcats | 413 +++++------------- scripts/lcatstest.in | 2 +- scripts/performance/performance.py.in | 50 ++- .../NestedGridRedistributeNodes.cc | 4 +- src/Distributed/ParmetisRedistributeNodes.cc | 2 +- src/Distributed/RedistributeNodes.cc | 2 +- .../SortAndDivideRedistributeNodes1d.cc | 4 +- .../SortAndDivideRedistributeNodes2d.cc | 8 +- .../SortAndDivideRedistributeNodes3d.cc | 10 +- .../SpaceFillingCurveRedistributeNodes.cc | 12 +- src/Distributed/VoronoiRedistributeNodes.cc | 8 +- .../Hydro/Noh/Noh-cylindrical-2d.py | 2 + .../functional/Hydro/Noh/Noh-spherical-3d.py | 6 + 15 files changed, 176 insertions(+), 350 deletions(-) diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index 3b5968840..714df3028 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -31,7 
+31,7 @@ if (NOT ENABLE_CXXONLY) endif() if ($ENV{SYS_TYPE} MATCHES ".*blueos.*") - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--addOp --smpi_off") + list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--smpi_off") endif() string(REPLACE ";" " " SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING "${SPHERAL_ATS_BUILD_CONFIG_ARGS}") diff --git a/scripts/gitlab/run_ats.py b/scripts/gitlab/run_ats.py index 0e86c701b..3b3e1ca8c 100755 --- a/scripts/gitlab/run_ats.py +++ b/scripts/gitlab/run_ats.py @@ -94,6 +94,7 @@ def run_ats_test(args): ats_configs = ' --timelimit="45m"' test_alloc = " ".join(args.test_alloc) run_command = f"{test_alloc} {lcats_test} --logs test-logs {ats_file} {ats_configs}" + print(f"Running {run_command}") ci_output = os.path.join(args.ci_build_dir, "test-logs") run_and_report(run_command, ci_output, 0) diff --git a/scripts/lc/lcats b/scripts/lc/lcats index eb16e4171..4159f99e2 100755 --- a/scripts/lc/lcats +++ b/scripts/lc/lcats @@ -1,13 +1,10 @@ #!/usr/bin/env python3 -from builtins import str -from builtins import object -import os, string, time, sys -import getopt +import os, time, sys import time import platform import sys -import optparse, re, copy +import argparse, re import subprocess d_debug= 0 @@ -31,7 +28,7 @@ def cpu_count(): except Exception as e: print ("Error running lscpu to get cpu count\n") sys.exit(1) - + out = p.stdout lines = out.split('\n') @@ -61,13 +58,13 @@ def createBsubFile(inCommand, inAllOptions): #BSUB -o jobRetry.output # output is sent to file job.output #BSUB -J nightlyBlueosBuild # name of the job #BSUB -W 240 # alloc time - was defaulting to 30 minutes - FILE.write("#BSUB -G %s \n" % machineSettings.options.group) + FILE.write("#BSUB -G %s \n" % machineSettings.group) FILE.write("#BSUB -o " + bsubOutputFilename + "\n") FILE.write("#BSUB -J " + inFilename + "\n") - FILE.write("#BSUB -W %d \n" % machineSettings.options.allocTime ) - FILE.write("#BSUB -n " + str(machineSettings.options.numProcs) + "\n") + FILE.write("#BSUB -W %d \n" % 
machineSettings.allocTime ) + FILE.write("#BSUB -n " + str(machineSettings.numProcs) + "\n") FILE.write("\n\n") - FILE.write("setenv MACHINE_TYPE " + machineSettings.options.machineType + '\n') + FILE.write("setenv MACHINE_TYPE " + machineSettings.machineType + '\n') FILE.write("setenv SYS_TYPE " + SYS_TYPE + '\n') FILE.write(""+ '\n') @@ -78,7 +75,7 @@ def createBsubFile(inCommand, inAllOptions): FILE.write("date"+ '\n') FILE.close() return inFilename - + #--------------------------------------------------------------------------- def createMsubFile(inCommand, inAllOptions): @@ -96,27 +93,27 @@ def createMsubFile(inCommand, inAllOptions): FILE.write("#MSUB -N " + inFilename + '\n') FILE.write("#MSUB -j oe "+ '\n') # directs all err output to stdout ") FILE.write("#MSUB -o " + msubOutputFilename + '\n') - FILE.write("#MSUB -l nodes=" + str(machineSettings.options.numNodes)+ ":ppn=" + str(cpu_count()) + '\n') - FILE.write("#MSUB -l walltime=%d:00\n" % machineSettings.options.allocTime ) + FILE.write("#MSUB -l nodes=" + str(machineSettings.numNodes)+ ":ppn=" + str(cpu_count()) + '\n') + FILE.write("#MSUB -l walltime=%d:00\n" % machineSettings.allocTime ) # FILE.write("#MSUB -V # exports all environment var "+ '\n') - if machineSettings.options.name != 'cray': - FILE.write("#MSUB -q " + machineSettings.options.partition + '\n') + if machineSettings.name != 'cray': + FILE.write("#MSUB -q " + machineSettings.partition + '\n') FILE.write("#MSUB -l gres=ignore "+ '\n') - FILE.write("#MSUB -A " + machineSettings.options.bank + " #bank to use "+ '\n') + FILE.write("#MSUB -A " + machineSettings.bank + " #bank to use "+ '\n') FILE.write(""+ '\n') # LLNL specific - if machineSettings.options.name == 'cray': + if machineSettings.name == 'cray': FILE.write("source " + "/usr/projects/kull/developers/tools/kull_cshrc.csh " + '\n') # rzmerl and zin specific - increase limits to avoid pthread_create errors. 
- if machineSettings.options.name == 'chaos5BatchCapable': + if machineSettings.name == 'chaos5BatchCapable': FILE.write('limit maxproc 7168'+'\n') FILE.write('limit descriptors 7168'+'\n') - FILE.write("setenv MACHINE_TYPE " + machineSettings.options.machineType + '\n') + FILE.write("setenv MACHINE_TYPE " + machineSettings.machineType + '\n') FILE.write("setenv SYS_TYPE " + SYS_TYPE + '\n') FILE.write(""+ '\n') @@ -151,18 +148,18 @@ def createSbatchFile(inCommand, inAllOptions): FILE.write("#SBATCH --job-name=" + inFilename + '\n') FILE.write("#SBATCH --error="+ sbatchErrorFilename + '\n') # directs all err output to stdout ") FILE.write("#SBATCH --output="+ sbatchOutputFilename + '\n') # directs all other output to stdout ") - FILE.write("#SBATCH --nodes=" + str(machineSettings.options.numNodes)+ "\n") + FILE.write("#SBATCH --nodes=" + str(machineSettings.numNodes)+ "\n") FILE.write("#SBATCH --ntasks=" + str(cpu_count()) +"\n") # Is this OKay? Not sure if we want to default ntasks. - FILE.write("#SBATCH --time=%d\n" % machineSettings.options.allocTime ) + FILE.write("#SBATCH --time=%d\n" % machineSettings.allocTime ) - if machineSettings.options.name != 'cray': - FILE.write("#SBATCH --partition=" + machineSettings.options.partition + '\n') - FILE.write("#SBATCH --account=" + machineSettings.options.bank + " #bank to use "+ '\n') + if machineSettings.name != 'cray': + FILE.write("#SBATCH --partition=" + machineSettings.partition + '\n') + FILE.write("#SBATCH --account=" + machineSettings.bank + " #bank to use "+ '\n') FILE.write(""+ '\n') # LLNL specific - FILE.write("setenv MACHINE_TYPE " + machineSettings.options.machineType + '\n') + FILE.write("setenv MACHINE_TYPE " + machineSettings.machineType + '\n') FILE.write("setenv SYS_TYPE " + SYS_TYPE + '\n') FILE.write(""+ '\n') @@ -176,107 +173,32 @@ def createSbatchFile(inCommand, inAllOptions): FILE.close() return inFilename -#--------------------------------------------------------------------------- -from 
optparse import SUPPRESS_HELP -class NoErrOptionParser(optparse.OptionParser): - # Found this online... modified some - def __init__(self,*args,**kwargs): - self.valid_args_cre_list = [] - optparse.OptionParser.__init__(self, *args, **kwargs) - - def error(self,msg): - optparse.OptionParser.error(self,msg) - pass - - def add_option(self,*args,**kwargs): - self.valid_args_cre_list.append(re.compile('^'+args[0] + "[ =]")) - self.valid_args_cre_list.append(re.compile('^' + args[0] + '$')) - optparse.OptionParser.add_option(self, *args, **kwargs) - - def parse_args(self,*args,**kwargs): - # filter out invalid options - args_to_parse = args[0] - # all args are stored in args_to_parse - new_args_to_parse = [] - for a in args_to_parse: - for cre in self.valid_args_cre_list: - if cre.match(a): - new_args_to_parse.append(a) - break - elif not a.startswith("-"): - new_args_to_parse.append(a) - break - - #args that'll be used are stored in new_args_to_parse - # remove old values and insert the new - while len(args_to_parse) > 0: - args_to_parse.pop() - for a in new_args_to_parse: - args_to_parse.append(a) - - return optparse.OptionParser.parse_args(self,*args,**kwargs) - -#------------------------------------------------------------------------ -class AttributeDict (dict): - """A dictionary whose items can be accessed as attributes.""" - def __getattr__(self, name): - return self[name] - def __setattr__(self, name, value): - self[name] = value - def __repr__(self): - from io import StringIO - out = StringIO() - print("AttributeDict(", file=out) - keys = list(self.keys()) - keys.sort() - for key in keys: - print(" ", key, " = ", repr(self[key]), ",", file=out) - print(")", file=out) - s = out.getvalue() - out.close() - return s - __str__ = __repr__ #------------------------------------------------------------------------ -class MachineInfo (object): +class MachineInfo: def __init__ (self, **options): "Must not throw an exception -- object must always get created." 
super(MachineInfo, self).__init__() - - - self.options = AttributeDict( - # Run settings - - name= '', - allocTime= '', - machineType= '', - batch = True, - numNodes=4, - numProcs= None, - group = '', - partition= 'pbatch', - - atsArgs= [], - - ) - try: - self.options.update(options) - except Exception as e: - self.set(INVALID, 'Bad options: ' + e) - return - + self.name = '' + self.allocTime = '' + self.machineType = '' + self.batch = True + self.numNodes = 4 + self.numProcs = None + self.group = '' + self.partition = 'pbatch' + self.atsArgs = [] + self.__dict__.update(options) #--------------------------------------------------------------------------- # MAIN #--------------------------------------------------------------------------- #--------------------------------------------------------------------------- -# Setup option parser +# Setup argument parser #--------------------------------------------------------------------------- -parser= NoErrOptionParser(add_help_option=False) - -(options, args) = parser.parse_args(sys.argv[:]) +parser = argparse.ArgumentParser() #--------------------------------------------------------------------------- useCpu= cpu_count() @@ -603,7 +525,7 @@ else: print("Could not determine machine settings to use.") sys.exit(1) -print("Selected machine settings for: ", machineSettings.options.name) +print("Selected machine settings for: ", machineSettings.name) #---------------------------------------------------------- # inits and defaults @@ -620,103 +542,102 @@ bsubFilenameDefault= "tmpAts." 
+ ezatsStartTime + ".job" # options affecting machine settings #--------------------------------------------------------------------------- -parser.add_option( "--allocTime", action="store", type="int", metavar="minutes", dest="allocTime", +parser.add_argument( "--allocTime", type=int, metavar="minutes", dest="allocTime", help = "The amount of time for the batch job (in minutes) .") -parser.add_option( "--interactive", action="store_true", dest="interactive", +parser.add_argument( "--interactive", action="store_true", dest="interactive", help = "Run ats interactively in SLURM (default is false if batch system detected.)") -parser.add_option( "--machineType", action="store", type="string", metavar="MACHINE_TYPE", dest="machineType", +parser.add_argument( "--machineType", type=str, metavar="MACHINE_TYPE", dest="machineType", help="Sets the MACHINE_TYPE for ats.") -parser.add_option( "--numNodes", action="store", type="int", metavar="number of nodes", dest="numNodes", +parser.add_argument( "--numNodes", type=int, metavar="number of nodes", dest="numNodes", help="Number of nodes to allocate for ats to run in.") -parser.add_option( "--partition", action="store", type="string", metavar="scheduler partition", dest="partition", +parser.add_argument( "--partition", type=str, metavar="scheduler partition", dest="partition", help = "Partition in which to run jobs.") -parser.add_option( "--bank", action="store", type="string", metavar="account to charge",dest="bank", +parser.add_argument( "--bank", type=str, metavar="account to charge",dest="bank", help = "Bank to use for batch job.") -parser.add_option("--wcid", action="store", type="string", metavar="WC-ID to assign", dest='wcid', - #default = machineSettings.options.bank, +parser.add_argument("--wcid", type=str, metavar="WC-ID to assign", dest='wcid', + #default = machineSettings.bank, help = "HERT WC-ID to use for batch job.") -parser.add_option( "--nogpu", action="store_true", dest="nogpu", +parser.add_argument( 
"--nogpu", action="store_true", dest="nogpu", help = "For blueos. Filters out gpu test. Used in conjunction with threaded option.") -parser.add_option( "--gpuonly", action="store_true", dest="gpuonly", +parser.add_argument( "--gpuonly", action="store_true", dest="gpuonly", help = "For blueos nvcc runs. Filters for gpu tests. Used in conjunction with threaded option.") -parser.add_option( "--sanitize", action="store_true", dest="sanitize", +parser.add_argument( "--sanitize", action="store_true", dest="sanitize", help = "Run sanitize tests. NOTE These need a specific build to work. ") #--------------------------------------------------------------------------- # other options #--------------------------------------------------------------------------- -parser.add_option( "--msubFilename", action="store", type="string", metavar="msub file name", dest='msubFilename', +parser.add_argument( "--msubFilename", type=str, metavar="msub file name", dest='msubFilename', default = msubFilenameDefault, help = "The name of the generated ats msub job script that will be run.") -parser.add_option( "--bsubFilename", action="store", type="string", metavar="msub file name", dest='bsubFilename', +parser.add_argument( "--bsubFilename", type=str, metavar="msub file name", dest='bsubFilename', default = bsubFilenameDefault, help = "The name of the generated ats bsub job script that will be run.") -parser.add_option( '--timelimit', dest='timelimit', default=30, +parser.add_argument( '--timelimit', dest='timelimit', default=30, help='Set the default time limit on each test. The value may be given as a digit followed by an s, m, or h to give the time in seconds, minutes (the default), or hours.') # The P2 version is a sym-link to the latest python 2 version of ATS. 
There's a P3 when we're ready for Python3 -parser.add_option( "--atsExe", action="store", type="string", dest="atsExe", default="/usr/apps/ats/7.0.P3/bin/ats", help="Sets which ats to use.") +parser.add_argument( "--atsExe", type=str, dest="atsExe", default="/usr/apps/ats/7.0.P3/bin/ats", help="Sets which ats to use.") -parser.add_option( "--addOp", action="store", type="string", dest="extraEzatsArgs", default='', - help="Adds extra job scheduler option to ezats.") - -parser.add_option( "--skip", action='store_true', dest='skip', default = False, +parser.add_argument( "--skip", action='store_true', dest='skip', default = False, help='skip actual execution of the tests, but show filtering results and missing test files.') -parser.add_option( "--testpath", action="store", type="string", dest="testpath", default="", +parser.add_argument( "--testpath", type=str, dest="testpath", default="", help="Specifies a path for ezats to use for unique test output.") -parser.add_option( "--debug-build", action="store_true", dest="debugbuild", default=False, +parser.add_argument( "--debug-build", action="store_true", dest="debugbuild", default=False, help="assume we are testing a debug build and should skip expensive (level>=100) tests.") -(options, args) = parser.parse_args(sys.argv[:]) +# Pass through options +parser.add_argument("passthrough", nargs="*", + help="Anything beyond a blank -- is passed through to the ats call") + +options = parser.parse_args() # If running in SLURM, use defaults of less nodes and pdebug partition if options.interactive: - machineSettings.options.batch = False - machineSettings.options.numNodes = 4 + machineSettings.batch = False + machineSettings.numNodes = 4 if "muir" in platform.node(): - machineSettings.options.partition = 'views' + machineSettings.partition = 'views' else: - machineSettings.options.partition = 'pdebug' + machineSettings.partition = 'pdebug' - machineSettings.options.allocTime = 60 + machineSettings.allocTime = 60 if 
options.allocTime: - machineSettings.options.allocTime = options.allocTime + machineSettings.allocTime = options.allocTime if options.machineType: - machineSettings.options.machineType = options.machineType + machineSettings.machineType = options.machineType if options.numNodes: - machineSettings.options.numNodes = options.numNodes + machineSettings.numNodes = options.numNodes if options.partition: - machineSettings.options.partition = options.partition + machineSettings.partition = options.partition if options.bank: - machineSettings.options.bank = options.bank + machineSettings.bank = options.bank if options.wcid: - machineSettings.options.wcid = options.wcid + machineSettings.wcid = options.wcid if (d_debug==1): print("options= ", options) - -whichAts= sys.argv[0] -atsArgs = sys.argv[1:] +atsArgs = " ".join(str(x) for x in options.passthrough) if "--help" in atsArgs or "-h" in atsArgs or "-help" in atsArgs: print("------------------------------------------------------------------") @@ -732,166 +653,56 @@ if "--help" in atsArgs or "-h" in atsArgs or "-help" in atsArgs: print("\n\n------------------------------------------------------------------") print("ezats sets these ATS options: ") print("------------------------------------------------------------------") - print('\n'.join(machineSettings.options.atsArgs)) + print('\n'.join(machineSettings.atsArgs)) print('\n\n') sys.exit(0) -# Convert array of strings to a string with spaces for delimiters -atsArgs = " ".join(str(x) for x in atsArgs) - -#--------------------------------------------------------------------------- -# Added this section to allow ezats to determine an appropriate filesystem -# to use for testing for this machine. The filesystem can then be passed -# onto to tests in ats that request a location. 
-# -# The appropriate filesystem to use for each machine is determined by the -# lustre file system summary and max bandwith tables -# https://computing.llnl.gov/?set=resources&page=lc_lustre -#--------------------------------------------------------------------------- - -#def checkFileSystem(path, timeout=4): -# 04/25/23: SD: Increasing the timeout as they're having lustre problems and we -# suspect this is causing failures. Revisit and change back to 4 when lustre issues -# are resolved -def checkFileSystem(path, timeout=30): - - def timeoutFunction( timeout, timeoutReturn, func, *args): - - res = timeoutReturn - - def handleTimeOut( signum, frame): - raise TimeOutException - - import signal - theOldHandler = signal.signal( signal.SIGALRM, handleTimeOut) - signal.alarm(timeout) - try: - try: - res = func(*args) - finally: - signal.signal(signal.SIGALRM, theOldHandler) - except: - pass #catch the TimeOutException - - signal.alarm(0) - - return res - - def canWriteToFileSystem(path): - - from tempfile import TemporaryFile - from os import makedirs - try: - if not os.path.exists(path): - makedirs( path ) - TemporaryFile(dir=path) - except: - return False - - return True - - - return timeoutFunction( timeout, False, canWriteToFileSystem, path ) - -#--------------------------------------------------------------------------- - -#---------------------------------------------------------- -# Examine options: -#---------------------------------------------------------- - print("Note: the srun message 'error: ioctl(TIOCGWINSZ)' can be ignored. 
\n[It means the process is trying to do something that requires a tty \nbut it's not doing either a read or write.]\n") -#---------------------------------------------------------- -# get args to add - added threaded option to the ezatsArgs or it would be passed to ats -#---------------------------------------------------------- -batchArgs= ['partition', 'bank', 'wcid'] -ezatsArgs= ['addOp', 'batch', 'interactive', 'name', 'allocTime', 'atsExe', 'machineType', 'bsubFile', 'msubFile', 'bank', 'testpath' , 'threaded', 'gpuonly' ,'nogpu', 'numProcs', 'group', 'sanitize', 'debug-build'] - +toAdd = "" # Add glue arg to pass unique file system test path to ats -toAdd= """ --glue='testpath=str("%s")' """ % options.testpath +if (options.testpath): + toAdd= """ --glue='testpath=str("%s")' """ % options.testpath if options.sanitize: - toAdd += """ --filter="sanitize==1" """ + toAdd += """ --filter="sanitize==1" """ if options.debugbuild: toAdd += """ --filter="level<100" """ -for machineArg in machineSettings.options: - if machineSettings.options[machineArg] == '' or \ - machineSettings.options[machineArg] == None: - continue - if machineArg not in atsArgs and \ - machineArg != 'atsArgs' and \ - machineArg not in batchArgs and \ - machineArg not in ezatsArgs: - toAdd += "--" + machineArg + " " + str(machineSettings.options[machineArg]) + " " -toAdd+= " " - -for machineArg in machineSettings.options.atsArgs: - theArg= machineArg.replace("=", " ").split()[0] - if theArg not in atsArgs: - toAdd += machineArg + " " - -atsArgs= options.atsExe + " " + toAdd + atsArgs -finalCommandToRun= atsArgs -#---------------------------------------------------------- -# clean finalCommandToRun -#---------------------------------------------------------- -listCommandsToRemove= ['--'+x for x in ezatsArgs] +toAdd += " ".join(x for x in machineSettings.atsArgs if x not in atsArgs) +if(machineSettings.machineType != 'SlurmProcessorScheduled' and + machineSettings.machineType != 
'blueos_3_ppc64le_ib_p9'): + try: + toAdd += f" --numNodes {machineSettings.numNodes}" + except: + pass + try: + toAdd += f" --wcid {machineSettings.wcid}" + except: + pass + +finalCommandToRun = f"{options.atsExe} {toAdd} {atsArgs}" -if machineSettings.options.batch: +if machineSettings.batch: listCommandsToRemove.append('--batchHost') listCommandsToRemove.append('--batchT') listCommandsToRemove.append('--batchP') listCommandsToRemove.append('--batch ') listCommandsToRemove.append('--partition') -if machineSettings.options.machineType=='SlurmProcessorScheduled' or machineSettings.options.machineType=='blueos_3_ppc64le_ib_p9': - listCommandsToRemove.append('--numNodes') - listCommandsToRemove.append('--wcid') -for machineArg in machineSettings.options.atsArgs: +for machineArg in machineSettings.atsArgs: if 'REMOVE' in machineArg: listCommandsToRemove.append(machineArg) # Remove all extra spaces finalCommandToRun = re.sub(r"\s+", " ", finalCommandToRun.strip()) -# Remove extra options for both batch and interactive -if len(listCommandsToRemove) > 0: - for unwantedCommand in listCommandsToRemove[:]: - startPos= finalCommandToRun.find(unwantedCommand) - if d_debug: - print("DEBUG: ", unwantedCommand, "-- found start pos= ", startPos) - if startPos!= -1: #if found - endPos= finalCommandToRun.find(" -", startPos+2) - if d_debug: - print("DEBUG: ", "end pos= ", endPos) - - if endPos== -1: - endPos= finalCommandToRun.find("--", startPos+2) - if d_debug: - print("DEBUG: ", "found end pos= ", endPos) - if endPos== -1: # looking at last option, backtrack to find the last space.. 
- endPos= finalCommandToRun.rfind(' ') - if d_debug: - print("DEBUG: ", "rfind() found end pos= ", endPos) - - if endPos < startPos: - finalCommandToRun= finalCommandToRun[:endPos] - else: - finalCommandToRun= finalCommandToRun[:startPos] + finalCommandToRun[endPos:] - - if d_debug: - print("DEBUG: ", unwantedCommand, "-----> ", finalCommandToRun) - -#finalCommandToRun= finalCommandToRun.replace(options.extraEzatsArgs, '', 1) -realFinalCommandToRun= None - #---------------------------------------------------------- # if MSUB, SBATCH or BSUB #---------------------------------------------------------- -if machineSettings.options.batch: +if machineSettings.batch: print("--- ATS COMMAND ---\n", finalCommandToRun) if "blueos" in SYS_TYPE: bsubFilename= createBsubFile(finalCommandToRun, options) @@ -935,53 +746,52 @@ if machineSettings.options.batch: #---------------------------------------------------------- else: - os.environ["MACHINE_TYPE"] = machineSettings.options.machineType - if machineSettings.options.name in ['rzwhippet_flux']: + os.environ["MACHINE_TYPE"] = machineSettings.machineType + if machineSettings.name in ['rzwhippet_flux']: os.environ["MACHINE_TYPE"] = "flux00" os.environ["BATCH_TYPE"] = "None" if platform.processor() == 'ppc64': numProcsLine = "" else: - numProcsLine = " -n %d" % ( machineSettings.options.numNodes* cpu_count() ) + numProcsLine = " -n %d" % ( machineSettings.numNodes* cpu_count() ) - if machineSettings.options.allocTime: - if machineSettings.options.name in ['rzwhippet_flux']: - allocTime = "-t %dm" % machineSettings.options.allocTime + if machineSettings.allocTime: + if machineSettings.name in ['rzwhippet_flux']: + allocTime = "-t %dm" % machineSettings.allocTime else: - allocTime = "--time=%d:00" % machineSettings.options.allocTime + allocTime = "--time=%d:00" % machineSettings.allocTime else: allocTime = "" HERT_WC_ID = '' - if machineSettings.options.name in ['chama', 'glory']: - HERT_WC_ID = ' --account=' + 
machineSettings.options.wcid + if machineSettings.name in ['chama', 'glory']: + HERT_WC_ID = ' --account=' + machineSettings.wcid - if machineSettings.options.name in ['rzwhippet_flux']: + if machineSettings.name in ['rzwhippet_flux']: finalCommandToRun= "flux alloc --exclusive " \ + " " + allocTime \ + HERT_WC_ID \ + options.extraEzatsArgs \ - + " -N " + str(machineSettings.options.numNodes) \ + + " -N " + str(machineSettings.numNodes) \ + numProcsLine + " " \ + finalCommandToRun - # + " -p " + machineSettings.options.partition + " " + # + " -p " + machineSettings.partition + " " # Threaded tests under ats should NOT use salloc elif 'blue' not in os.environ['SYS_TYPE']: finalCommandToRun= "salloc --exclusive " \ + " " + allocTime \ + HERT_WC_ID \ + options.extraEzatsArgs \ - + " -N " + str(machineSettings.options.numNodes) \ + + " -N " + str(machineSettings.numNodes) \ + numProcsLine \ - + " -p " + machineSettings.options.partition + " " \ + + " -p " + machineSettings.partition + " " \ + finalCommandToRun else: - finalCommandToRun += " --numNodes="+ str(machineSettings.options.numNodes) + finalCommandToRun += " --numNodes="+ str(machineSettings.numNodes) #sys.exit() if (d_debug==1): - print("whichAts= ", whichAts) print("finalCommandToRun after= ", finalCommandToRun) #---------------------------------------------------------- @@ -1002,12 +812,7 @@ if tagFilter != -1 and comboMark==True: else: argsToUse= finalCommandToRun.split() -if realFinalCommandToRun is not None: - print("Running:\n ", realFinalCommandToRun) - if (d_debug==1): - print("Really running:\n ", finalCommandToRun) -else: - print("Running:\n ", finalCommandToRun) +print("Running:\n ", finalCommandToRun) if (d_debug==1): print("atsExe= ", options.atsExe) print("atsArgs= ", argsToUse) @@ -1018,5 +823,3 @@ try: except Exception as e: print("Caught - non-zero exit status 3 - thrown by final command", e) print("Tests appear to execute correctly...but this output is here to keep an eye on this.") - - diff 
--git a/scripts/lcatstest.in b/scripts/lcatstest.in index c13ae04c1..58ece000f 100644 --- a/scripts/lcatstest.in +++ b/scripts/lcatstest.in @@ -1,3 +1,3 @@ #!/usr/bin/env bash -@CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/lcats --atsExe @CMAKE_INSTALL_PREFIX@/.venv/bin/ats -e @CMAKE_INSTALL_PREFIX@/spheral @SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@ "$@" +@CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/lcats --atsExe @CMAKE_INSTALL_PREFIX@/.venv/bin/ats -- -e @CMAKE_INSTALL_PREFIX@/spheral @SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@ "$@" diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in index 16ad68bb9..4e267e319 100644 --- a/scripts/performance/performance.py.in +++ b/scripts/performance/performance.py.in @@ -3,7 +3,7 @@ # This file runs and compares performance tests through the ats system. # Run using: ./spheral-lcats tests/performance.py -import sys, os, argparse, time +import sys, shutil, os, time import numpy as np caliper_loc = "@CONFIG_CALIPER_DIR@" @@ -16,18 +16,31 @@ benchmark_dir = "/usr/gapps/Spheral/benchmarks" spheral_sys_arch = "@SPHERAL_SYS_ARCH@" # Current install configuration from Spack spheral_install_config = "@SPHERAL_CONFIGURATION@" +# Manually change this if wanting to add performance +# data to benchmark directory +ci_run = False # Function called on exit to do timing comparisons def compare_times(manager): for test in manager.testlist: run_dir = test.directory + cali_file = test.options["caliper_filename"] cfile = os.path.join(run_dir, test.options["caliper_filename"]) - ref_caliper_file = test.options["ref_cali_file"] - regions = test.options["regions"] - timers = test.options["timers"] + ref_regions = test.options["regions"] + ref_timers = test.options["timers"] r = cr.CaliperReader() r.read(cfile) records = r.records + gls = r.globals + if (ci_run): + ref_caliper_dir = test.options["ref_cali_dir"] + if (not os.path.exists(ref_caliper_dir)): + os.makedirs(ref_caliper_dir) + 
new_cali_data = os.path.join(ref_caliper_dir, cfile) + shutil.copyfile(cfile, new_cali_data) + print("globals") + for i, j in gls.items(): + print(f"{i}: {j}") # Extract current times times = {} for rec in records: @@ -35,19 +48,20 @@ def compare_times(manager): fname = rec["region"] if (type(fname) is list): fname = fname[-1] - if (fname in regions): + if (fname in ref_regions): if (fname in times): - for t in timers: + for t in ref_timers: times[fname][t] += float(rec[t]) else: new_dict = {} - for t in timers: + for t in ref_timers: new_dict.update({t: float(rec[t])}) times.update({fname: new_dict}) - for i, j in times.items(): - print(f"{i}") - for k, v in j.items(): - print(f"{k}: {v}") + # print("timers") + # for i, j in times.items(): + # print(f"{i}") + # for k, v in j.items(): + # print(f"{k}: {v}") onExit(compare_times) glue(keep=True) @@ -57,7 +71,8 @@ group(name="NOH tests") # General input for all Noh-cylindrical-2d.py tests test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/tests/functional/Hydro/Noh" test_file = "Noh-cylindrical-2d.py" -gen_noh_inp = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 --nPerh 2.01 --graphics False --clearDirectories True --doCompare False" +gen_noh_inp = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 "+\ + "--nPerh 2.01 --graphics False --clearDirectories True --doCompare False" # Test 1 num_cores = 8 @@ -67,14 +82,13 @@ if ("broadwell" in spheral_sys_arch): num_cores = 72 ppc = 1000 total_points = num_cores * ppc -nradial = int(np.sqrt(total_points)) -ntheta = nradial +nradial = 100 test_name = "NC2D_1" caliper_filename = f"{test_name}_{int(time.time())}.cali" -inps = f"{gen_noh_inp} --nTheta {ntheta} --nRadial {nradial} --steps 10 --caliperFilename {caliper_filename}" +inps = f"{gen_noh_inp} --nRadial {nradial} --steps 10 --caliperFilename {caliper_filename}" test_path = os.path.join(test_dir, test_file) # Path to benchmark timing data -ref_cali_file = os.path.join(benchmark_dir, spheral_install_config, 
caliper_filename) +ref_cali_dir = os.path.join(benchmark_dir, spheral_install_config, test_name) # Select which timing regions to post-process regions = ["CheapRK2", "CheapRK2PreInit", @@ -84,12 +98,12 @@ regions = ["CheapRK2", "CheapRK2EndStep"] # Select which timers to use to post-process the regions above timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks -spec_inps = f"{inps} --adiakData 'test_name: {test_name}, total_points:{nradial*ntheta}'" +spec_inps = f"{inps} --adiakData 'test_name: {test_name}'" t = test(script=test_path, clas=spec_inps, label=f"{test_name}", np=num_cores, caliper_filename=caliper_filename, regions=regions, timers=timers, - ref_cali_file=ref_cali_file) + ref_cali_dir=ref_cali_dir) endgroup() diff --git a/src/Distributed/NestedGridRedistributeNodes.cc b/src/Distributed/NestedGridRedistributeNodes.cc index 3ab1263b0..435508557 100644 --- a/src/Distributed/NestedGridRedistributeNodes.cc +++ b/src/Distributed/NestedGridRedistributeNodes.cc @@ -110,7 +110,7 @@ redistributeNodes(DataBase& dataBase, // Output the initial load distribution statistics. const string initialLoadStats = this->gatherDomainDistributionStatistics(work); - if (procID == 0) cerr << "NestedGridRedistributeNodes::redistributeNodes initial load balancing:" << endl + if (procID == 0) cout << "NestedGridRedistributeNodes::redistributeNodes initial load balancing:" << endl << initialLoadStats << endl << endl; // Compute the total work, and the target work per processor. @@ -240,7 +240,7 @@ redistributeNodes(DataBase& dataBase, // Output the final load distribution statistics. 
const string finalLoadStats = this->gatherDomainDistributionStatistics(work); - if (procID == 0) cerr << "NestedGridRedistributeNodes::redistributeNodes final load balancing:" << endl + if (procID == 0) cout << "NestedGridRedistributeNodes::redistributeNodes final load balancing:" << endl << finalLoadStats << endl << endl; MPI_Barrier(Communicator::communicator()); diff --git a/src/Distributed/ParmetisRedistributeNodes.cc b/src/Distributed/ParmetisRedistributeNodes.cc index 40db1fcff..5806cdb44 100644 --- a/src/Distributed/ParmetisRedistributeNodes.cc +++ b/src/Distributed/ParmetisRedistributeNodes.cc @@ -669,7 +669,7 @@ printConnectivityStatistics(const map > >& neighbo CHECK(navgNeighbor > 0); avgNeighbor /= navgNeighbor; - cerr << "ParmetisRedistributeNodes:: min connections = " + cout << "ParmetisRedistributeNodes:: min connections = " << minNeighbor << endl << " max connections = " << maxNeighbor << endl diff --git a/src/Distributed/RedistributeNodes.cc b/src/Distributed/RedistributeNodes.cc index e7c4039ff..e8dd382f3 100644 --- a/src/Distributed/RedistributeNodes.cc +++ b/src/Distributed/RedistributeNodes.cc @@ -736,7 +736,7 @@ workPerNode(const DataBase& dataBase, // Output some statistics. const Scalar minWeight = result.min(); const Scalar maxWeight = result.max(); - if (Process::getRank() == 0) cerr << "RedistributeNodes::workPerNode: min/max work : " + if (Process::getRank() == 0) cout << "RedistributeNodes::workPerNode: min/max work : " << minWeight << " " << maxWeight << endl; diff --git a/src/Distributed/SortAndDivideRedistributeNodes1d.cc b/src/Distributed/SortAndDivideRedistributeNodes1d.cc index c3a0d4bf6..757afb17a 100644 --- a/src/Distributed/SortAndDivideRedistributeNodes1d.cc +++ b/src/Distributed/SortAndDivideRedistributeNodes1d.cc @@ -114,7 +114,7 @@ redistributeNodes(DataBase >& dataBase, // Output the initial load distribution statistics. 
const string initialLoadStats = this->gatherDomainDistributionStatistics(work); - if (procID == 0) cerr << "SortAndDivideRedistributeNodes::redistributeNodes initial load balancing:" << endl + if (procID == 0) cout << "SortAndDivideRedistributeNodes::redistributeNodes initial load balancing:" << endl << initialLoadStats << endl << endl; // Compute the total work, and the target work per processor. @@ -183,7 +183,7 @@ redistributeNodes(DataBase >& dataBase, // Output the final load distribution statistics. const string finalLoadStats = this->gatherDomainDistributionStatistics(work); - if (procID == 0) cerr << "SortAndDivideRedistributeNodes::redistributeNodes final load balancing:" << endl + if (procID == 0) cout << "SortAndDivideRedistributeNodes::redistributeNodes final load balancing:" << endl << finalLoadStats << endl << endl; MPI_Barrier(Communicator::communicator()); diff --git a/src/Distributed/SortAndDivideRedistributeNodes2d.cc b/src/Distributed/SortAndDivideRedistributeNodes2d.cc index 67471ac7c..28adc908f 100644 --- a/src/Distributed/SortAndDivideRedistributeNodes2d.cc +++ b/src/Distributed/SortAndDivideRedistributeNodes2d.cc @@ -125,14 +125,14 @@ redistributeNodes(DataBase >& dataBase, // Output the initial load distribution statistics. const string initialLoadStats = this->gatherDomainDistributionStatistics(work); if (procID == 0) { - cerr << "SortAndDivideRedistributeNodes::redistributeNodes initial load balancing:" << endl + cout << "SortAndDivideRedistributeNodes::redistributeNodes initial load balancing:" << endl << initialLoadStats << endl << " Domain distribution shape tensor: " << shapeTensor.eigenValues << endl << " Number of domains per work chunk: "; for (vector::const_iterator itr = domainsPerStep.begin(); itr != domainsPerStep.end(); - ++itr) cerr << " " << *itr; - cerr << endl; + ++itr) cout << " " << *itr; + cout << endl; } // Compute the total work, and the target work per processor. 
@@ -232,7 +232,7 @@ redistributeNodes(DataBase >& dataBase, // Output the final load distribution statistics. const string finalLoadStats = this->gatherDomainDistributionStatistics(work); - if (procID == 0) cerr << "SortAndDivideRedistributeNodes::redistributeNodes final load balancing:" << endl + if (procID == 0) cout << "SortAndDivideRedistributeNodes::redistributeNodes final load balancing:" << endl << finalLoadStats << endl << endl; MPI_Barrier(Communicator::communicator()); diff --git a/src/Distributed/SortAndDivideRedistributeNodes3d.cc b/src/Distributed/SortAndDivideRedistributeNodes3d.cc index b45cbf79b..f6a837a98 100644 --- a/src/Distributed/SortAndDivideRedistributeNodes3d.cc +++ b/src/Distributed/SortAndDivideRedistributeNodes3d.cc @@ -123,11 +123,11 @@ redistributeNodes(DataBase >& dataBase, // Output the initial load distribution statistics. const string initialLoadStats = this->gatherDomainDistributionStatistics(work); if (procID == 0) { - cerr << "SortAndDivideRedistributeNodes::redistributeNodes initial load balancing:" << endl + cout << "SortAndDivideRedistributeNodes::redistributeNodes initial load balancing:" << endl << initialLoadStats << endl << " Domain distribution shape tensor: " << shapeTensor.eigenValues << endl; for (int i = 0; i != Dimension::nDim; ++i) { - cerr << " " << shapeTensor.eigenVectors.getColumn(i) << endl; + cout << " " << shapeTensor.eigenVectors.getColumn(i) << endl; } } @@ -205,7 +205,7 @@ redistributeNodes(DataBase >& dataBase, // Iterator over the number of z domains we'll be assigning. for (int iz = 0; iz != numZChunks; ++iz) { - if (procID == 0) cerr << "Assigning domain " << assignDomainID + if (procID == 0) cout << "Assigning domain " << assignDomainID << " of " << numProcs << "..."; // Peel off nodes from the front of the unassigned nodes, until the desired work @@ -231,7 +231,7 @@ redistributeNodes(DataBase >& dataBase, // Increment the domain we're assigning to. ++assignDomainID; - if (procID == 0) cerr << "Done." 
<< endl; + if (procID == 0) cout << "Done." << endl; } @@ -272,7 +272,7 @@ redistributeNodes(DataBase >& dataBase, // Output the final load distribution statistics. const string finalLoadStats = this->gatherDomainDistributionStatistics(work); - if (procID == 0) cerr << "SortAndDivideRedistributeNodes::redistributeNodes final load balancing:" << endl + if (procID == 0) cout << "SortAndDivideRedistributeNodes::redistributeNodes final load balancing:" << endl << finalLoadStats << endl << endl; MPI_Barrier(Communicator::communicator()); diff --git a/src/Distributed/SpaceFillingCurveRedistributeNodes.cc b/src/Distributed/SpaceFillingCurveRedistributeNodes.cc index ee6f135f5..e3d662daa 100644 --- a/src/Distributed/SpaceFillingCurveRedistributeNodes.cc +++ b/src/Distributed/SpaceFillingCurveRedistributeNodes.cc @@ -157,27 +157,27 @@ redistributeNodes(DataBase& dataBase, // Compute the target work per domain. const Scalar targetWork = workField.sumElements()/numProcs; - if (procID == 0) cerr << "SpaceFillingCurveRedistributeNodes: Target work per process " << targetWork << endl; + if (procID == 0) cout << "SpaceFillingCurveRedistributeNodes: Target work per process " << targetWork << endl; // Compute the Key indices for each point on this processor. - if (procID == 0) cerr << "SpaceFillingCurveRedistributeNodes: Hashing indices" << endl; + if (procID == 0) cout << "SpaceFillingCurveRedistributeNodes: Hashing indices" << endl; FieldList indices = computeHashedIndices(dataBase); // Find the range of hashed indices. const Key indexMin = indices.min(); const Key indexMax = indices.max(); CHECK(indexMax < indexMax + indexMax); - if (procID == 0) cerr << "SpaceFillingCurveRedistributeNodes: Index min/max : " << indexMin << " " << indexMax << endl; + if (procID == 0) cout << "SpaceFillingCurveRedistributeNodes: Index min/max : " << indexMin << " " << indexMax << endl; // Build the array of (hashed index, DomainNode) pairs. // Note this comes back locally sorted. 
- if (procID == 0) cerr << "SpaceFillingCurveRedistributeNodes: sorting indices" << endl; + if (procID == 0) cout << "SpaceFillingCurveRedistributeNodes: sorting indices" << endl; vector > > sortedIndices = buildIndex2IDPairs(indices, nodeDistribution); const int numLocalNodes = nodeDistribution.size(); // Build our set of unique indices and their count. - if (procID == 0) cerr << "SpaceFillingCurveRedistributeNodes: Counting uniques and such" << endl; + if (procID == 0) cout << "SpaceFillingCurveRedistributeNodes: Counting uniques and such" << endl; vector uniqueIndices; vector count; vector work; @@ -209,7 +209,7 @@ redistributeNodes(DataBase& dataBase, CHECK(work.size() == uniqueIndices.size()); } maxCount = allReduce(maxCount, SPHERAL_OP_MAX); - if (procID == 0) cerr << "SpaceFillingCurveRedistributeNodes: max redundancy is " << maxCount << endl; + if (procID == 0) cout << "SpaceFillingCurveRedistributeNodes: max redundancy is " << maxCount << endl; // // DEBUG // { diff --git a/src/Distributed/VoronoiRedistributeNodes.cc b/src/Distributed/VoronoiRedistributeNodes.cc index 9449965fc..142782ec6 100644 --- a/src/Distributed/VoronoiRedistributeNodes.cc +++ b/src/Distributed/VoronoiRedistributeNodes.cc @@ -404,7 +404,7 @@ redistributeNodes(DataBase& dataBase, // Define the the length scale we use to determine when the generator positions have converged. const double tol = (xmax - xmin).minElement() * mTolerance; - if (procID == 0) cerr << "VoronoiRedistributeNodes: Found bounding box of " << xmin << " " << xmax << endl + if (procID == 0) cout << "VoronoiRedistributeNodes: Found bounding box of " << xmin << " " << xmax << endl << " yielding generator convergence tolerance of " << tol << endl; // Determine the average work per generator. 
@@ -531,7 +531,7 @@ redistributeNodes(DataBase& dataBase, CHECK(newGeneratorsInParents.size() == newParentCells.size()); generatorsInParents = newGeneratorsInParents; parentCells = newParentCells; - if (procID == 0) cerr << " Generation " << level << " : " + if (procID == 0) cout << " Generation " << level << " : " << numRemainingGenerators << " generators remaining in " << generatorsInParents.size() << " cells." << endl; @@ -540,7 +540,7 @@ redistributeNodes(DataBase& dataBase, // // Are there still remaining degeneracies in the generator positions? // if (numRemainingGenerators > 0) { -// if (procID == 0) cerr << " --> Breaking up " << numRemainingGenerators +// if (procID == 0) cout << " --> Breaking up " << numRemainingGenerators // << " degeneracies in intial generator positions." // << endl; // for (vector >::const_iterator cellItr = generatorsInParents.begin(); @@ -648,7 +648,7 @@ redistributeNodes(DataBase& dataBase, workRatio = maxWork*safeInv(minWork); // Report this iterations statistics. 
- if (procID == 0) cerr << "VoronoiRedistributeNodes: Lloyds iteration " << iteration << endl + if (procID == 0) cout << "VoronoiRedistributeNodes: Lloyds iteration " << iteration << endl << " max change: " << maxDeltaGenerator << endl << " work ratio change: " << workRatio << " " << oldWorkRatio << " " << abs(workRatio*safeInv(oldWorkRatio) - 1.0) << endl << " [min, max, avg] work [" << minWork << ", " << maxWork << ", " << avgWork << "]" << endl diff --git a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py index 533e3b039..60f5ce444 100644 --- a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py +++ b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py @@ -44,6 +44,7 @@ from math import * from SolidSpheral2d import * +from SpheralUtilities import adiak_value from SpheralTestUtilities import * from GenerateNodeDistribution2d import * from CubicNodeGenerator import GenerateSquareNodeDistribution @@ -330,6 +331,7 @@ output("db.appendNodeList(nodes1)") output("db.numNodeLists") output("db.numFluidNodeLists") +adiak_value("total_points", db.globalNumInternalNodes) #------------------------------------------------------------------------------- # Construct the hydro physics object. 
diff --git a/tests/functional/Hydro/Noh/Noh-spherical-3d.py b/tests/functional/Hydro/Noh/Noh-spherical-3d.py index 43226af1a..465827cdb 100644 --- a/tests/functional/Hydro/Noh/Noh-spherical-3d.py +++ b/tests/functional/Hydro/Noh/Noh-spherical-3d.py @@ -7,6 +7,7 @@ import os, shutil, sys from math import * from SolidSpheral3d import * +from SpheralUtilities import adiak_value from SpheralTestUtilities import * from GenerateNodeDistribution3d import * @@ -123,6 +124,7 @@ dataDir = "dumps-spherical-Noh", outputFile = "Noh_spherical_profiles.gnu", comparisonFile = "None", + doCompare = True, graphics = True, ) @@ -253,6 +255,7 @@ output("db.appendNodeList(nodes1)") output("db.numNodeLists") output("db.numFluidNodeLists") +adiak_value("total_points", db.globalNumInternalNodes) #------------------------------------------------------------------------------- # Construct the hydro physics object. @@ -523,6 +526,9 @@ control.updateViz(control.totalSteps, integrator.currentTime, 0.0) control.dropRestartFile() +if not doCompare: + sys.exit(0) + #------------------------------------------------------------------------------- # Plot the results. 
#------------------------------------------------------------------------------- From 26226679db379c65fb5c1072664ae0da5276ec9a Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 14 Oct 2024 10:55:56 -0700 Subject: [PATCH 10/44] Fix typo in Noh-planar-1d.py --- tests/functional/Hydro/Noh/Noh-planar-1d.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/functional/Hydro/Noh/Noh-planar-1d.py b/tests/functional/Hydro/Noh/Noh-planar-1d.py index 8d7259c31..021c4df13 100644 --- a/tests/functional/Hydro/Noh/Noh-planar-1d.py +++ b/tests/functional/Hydro/Noh/Noh-planar-1d.py @@ -184,7 +184,7 @@ comparisonFile = "None", normOutputFile = "None", writeOutputLabel = True, - doComparison = True, + doCompare = True, # Parameters for the test acceptance., L1rho = 0.0537214, From 4c2807bfc1efce5722321b99ab95cb676dc0dc0a Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 14 Oct 2024 15:13:45 -0700 Subject: [PATCH 11/44] Change cout to cerr for iterate ideal h --- src/Utilities/iterateIdealH.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/Utilities/iterateIdealH.cc b/src/Utilities/iterateIdealH.cc index 308118349..104e45141 100644 --- a/src/Utilities/iterateIdealH.cc +++ b/src/Utilities/iterateIdealH.cc @@ -279,7 +279,7 @@ iterateIdealH(DataBase& dataBase, // Output the statitics. if (Process::getRank() == 0 && maxIterations > 1) - cerr << "iterateIdealH: (iteration, deltaH) = (" + cout << "iterateIdealH: (iteration, deltaH) = (" << itr << ", " << maxDeltaH << ")" << endl; @@ -348,7 +348,7 @@ iterateIdealH(DataBase& dataBase, // Report the final timing. const auto t1 = clock(); if (Process::getRank() == 0 && maxIterations > 1) - cerr << "iterateIdealH: required a total of " + cout << "iterateIdealH: required a total of " << (t1 - t0)/CLOCKS_PER_SEC << " seconds." 
<< endl; From 48d585d0f93ae7212cd76b0a4aef75a82856d046 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 23 Oct 2024 13:08:14 -0700 Subject: [PATCH 12/44] Overhaul of interactions with ATS system, fixed hacky install of test files, changed SPHERAL_TEST_INSTALL_PREFIX to include tests directory, added tests for viz and restart files in SpheralController, created separate performance analysis python file --- .gitlab/machines.yml | 2 - .gitlab/scripts.yml | 2 +- CMakeLists.txt | 2 +- Dockerfile | 2 +- cmake/SetupSpheral.cmake | 48 +- scripts/CMakeLists.txt | 26 +- scripts/atstest.in | 3 - scripts/gitlab/performance_analysis.py.in | 32 + scripts/gitlab/run_ats.py | 108 --- scripts/lc/lcats | 835 +++--------------- scripts/lcatstest.in | 3 - scripts/performance/CMakeLists.txt | 10 - scripts/performance/performance.py.in | 111 --- scripts/spheral-setup-venv.in | 6 +- scripts/spheral_ats.py.in | 158 ++++ src/PYB11/Utilities/Utilities_PYB11.py | 23 +- src/SimulationControl/SpheralController.py | 26 +- src/SimulationControl/SpheralOptionParser.py | 5 +- src/SimulationControl/SpheralTimingParser.py | 15 +- tests/CRKSPH.ats | 1 + tests/PSPH.ats | 1 + tests/compSPH.ats | 1 + .../Hydro/Noh/Noh-cylindrical-2d.py | 31 +- .../functional/Hydro/Noh/Noh-spherical-3d.py | 29 +- tests/integration.ats | 1 + tests/performance.py.in | 145 +++ tests/unit/CMakeLists.txt | 2 +- tests/unit/CXXTests/CMakeLists.txt | 4 +- 28 files changed, 573 insertions(+), 1059 deletions(-) delete mode 100644 scripts/atstest.in create mode 100644 scripts/gitlab/performance_analysis.py.in delete mode 100755 scripts/gitlab/run_ats.py delete mode 100644 scripts/lcatstest.in delete mode 100644 scripts/performance/CMakeLists.txt delete mode 100644 scripts/performance/performance.py.in create mode 100644 scripts/spheral_ats.py.in create mode 100644 tests/performance.py.in diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 95b01e765..56efee2f4 100644 --- a/.gitlab/machines.yml +++ 
b/.gitlab/machines.yml @@ -9,7 +9,6 @@ HOSTNAME: 'ruby' PARTITION: pdebug BUILD_ALLOC: srun -N 1 -c 36 -p pdebug -t 60 - TEST_ALLOC: '' CLEAN_ALLOC: srun -n 20 extends: [.on_toss_4_x86] @@ -20,7 +19,6 @@ variables: HOSTNAME: 'lassen' BUILD_ALLOC: lalloc 1 -W 60 - TEST_ALLOC: $BUILD_ALLOC CLEAN_ALLOC: lalloc 1 lrun -n 20 LC_MODULES: "cuda/11.1.0" extends: [.on_blueos_3_ppc64] diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 5b44f650f..69fdc435b 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -55,7 +55,7 @@ - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - ./build_gitlab/install/spheral $SCRIPT_DIR/gitlab/run_ats.py --test-alloc "$TEST_ALLOC" --ats-file $ATS_FILE --ci-build-dir $CI_BUILD_DIR || exit_code=$? + - ./build_gitlab/install/spheral ./build_gitlab/install/spheral-ats --ciRun ./build_gitlab/install/$ATS_FILE || exit_code=$? - cp -r test-logs $CI_PROJECT_DIR - exit $exit_code artifacts: diff --git a/CMakeLists.txt b/CMakeLists.txt index 41b049c11..07640e039 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -5,7 +5,7 @@ include(cmake/SpheralVersion.cmake) project(spheral LANGUAGES C CXX Fortran VERSION ${SPHERAL_VERSION}) set(SPHERAL_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR} CACHE PATH "Path to Spheral source directory") -set(SPHERAL_TEST_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX}) +set(SPHERAL_TEST_INSTALL_PREFIX ${CMAKE_INSTALL_PREFIX}/tests) include(cmake/SetupSpheral.cmake) diff --git a/Dockerfile b/Dockerfile index 41a840fac..ba0889335 100644 --- a/Dockerfile +++ b/Dockerfile @@ -75,5 +75,5 @@ RUN make install # Run ATS testing suite. 
WORKDIR ../install ENV MPLBACKEND=agg -RUN ./spheral-atstest --filter="level<100" tests/integration.ats +RUN ./spheral-ats --filter="level<100" tests/integration.ats # ----------------------------------------------------------------------------- diff --git a/cmake/SetupSpheral.cmake b/cmake/SetupSpheral.cmake index c0c7e756a..5a9f99ee2 100644 --- a/cmake/SetupSpheral.cmake +++ b/cmake/SetupSpheral.cmake @@ -153,41 +153,21 @@ endif() # Build C++ tests and install tests to install directory #------------------------------------------------------------------------------- if (ENABLE_TESTS) + configure_file( + "${SPHERAL_ROOT_DIR}/tests/performance.py.in" + "${CMAKE_BINARY_DIR}/tests/performance.py" + ) + install(FILES + "${CMAKE_BINARY_DIR}/tests/performance.py" + DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" + ) + install(DIRECTORY ${SPHERAL_ROOT_DIR}/tests/ + USE_SOURCE_PERMISSIONS + DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" + PATTERN "*CMakeLists.txt*" EXCLUDE + PATTERN "*.in" EXCLUDE + PATTERN "*~" EXCLUDE) add_subdirectory(${SPHERAL_ROOT_DIR}/tests/unit) - - # A macro to preserve directory structure when installing files - macro(install_with_directory) - set(optionsArgs "") - set(oneValueArgs SOURCE DESTINATION) - set(multiValueArgs FILES) - cmake_parse_arguments(CAS "${optionsArgs}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} ) - foreach(FILE ${CAS_FILES}) - get_filename_component(DIR ${FILE} DIRECTORY) - INSTALL(FILES ${CAS_SOURCE}/${FILE} DESTINATION ${CAS_DESTINATION}/${DIR}) - endforeach() - endmacro(install_with_directory) - - # Find the test files we want to install - set(test_files1 "") - if (EXISTS "${CMAKE_SOURCE_DIR}/.git") - execute_process( - COMMAND git ls-files tests - WORKING_DIRECTORY ${SPHERAL_ROOT_DIR} - OUTPUT_VARIABLE test_files1) - else() - execute_process( - COMMAND find tests -type f - WORKING_DIRECTORY ${SPHERAL_ROOT_DIR} - OUTPUT_VARIABLE test_files1) - endif() - string(REPLACE "\n" " " test_files ${test_files1}) - 
separate_arguments(test_files) - list(REMOVE_ITEM test_files tests/unit/CXXTests/runCXXTests.ats) - list(REMOVE_ITEM test_files tests/unit/Utilities/testTimers.py.in) - install_with_directory( - FILES ${test_files} - SOURCE ${SPHERAL_ROOT_DIR} - DESTINATION ${SPHERAL_TEST_INSTALL_PREFIX}) endif() include(${SPHERAL_ROOT_DIR}/cmake/SpheralConfig.cmake) diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index 714df3028..31eaddb53 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -30,43 +30,37 @@ if (NOT ENABLE_CXXONLY) list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter='\"not svph\"'") endif() - if ($ENV{SYS_TYPE} MATCHES ".*blueos.*") - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--smpi_off") - endif() - string(REPLACE ";" " " SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING "${SPHERAL_ATS_BUILD_CONFIG_ARGS}") configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/spheral-setup-venv.in" "${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh" - ) + ) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/spheral-env.in" "${CMAKE_CURRENT_BINARY_DIR}/spheral-env.sh" - ) + ) configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/atstest.in" - "${CMAKE_CURRENT_BINARY_DIR}/atstest.sh" - ) + "${CMAKE_CURRENT_SOURCE_DIR}/spheral_ats.py.in" + "${CMAKE_CURRENT_BINARY_DIR}/spheral_ats.py" + ) configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/lcatstest.in" - "${CMAKE_CURRENT_BINARY_DIR}/lcatstest.sh" + "${CMAKE_CURRENT_SOURCE_DIR}/gitlab/performance_analysis.py.in" + "${CMAKE_CURRENT_BINARY_DIR}/performance_analysis.py" ) install(FILES "${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh" "${CMAKE_CURRENT_BINARY_DIR}/spheral-env.sh" - "${CMAKE_CURRENT_BINARY_DIR}/atstest.sh" - "${CMAKE_CURRENT_BINARY_DIR}/lcatstest.sh" - "${CMAKE_CURRENT_SOURCE_DIR}/lc/lcats" + "${CMAKE_CURRENT_BINARY_DIR}/spheral_ats.py" + "${CMAKE_CURRENT_BINARY_DIR}/performance_analysis.py" + "${CMAKE_CURRENT_SOURCE_DIR}/spheralutils.py" DESTINATION "${CMAKE_INSTALL_PREFIX}/scripts" ) - add_subdirectory(performance) - 
install(CODE "execute_process( \ COMMAND env PYTHONPATH=${SPACK_PYTHONPATH} ${PYTHON_EXE} -m venv .venv --without-pip --prompt \ 'Spheral>')" diff --git a/scripts/atstest.in b/scripts/atstest.in deleted file mode 100644 index c46f4d31c..000000000 --- a/scripts/atstest.in +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -@CMAKE_INSTALL_PREFIX@/.venv/bin/ats -e @CMAKE_INSTALL_PREFIX@/spheral @SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@ "$@" diff --git a/scripts/gitlab/performance_analysis.py.in b/scripts/gitlab/performance_analysis.py.in new file mode 100644 index 000000000..97739b3ab --- /dev/null +++ b/scripts/gitlab/performance_analysis.py.in @@ -0,0 +1,32 @@ +#!/user/bin/env python3 + +import os, sys +import argparse + +# Location of benchmark data +benchmark_dir = "/usr/gapps/Spheral/benchmarks" + +caliper_loc = "@CONFIG_CALIPER_DIR@" +sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) +import caliperreader as cr + +def main(): + #--------------------------------------------------------------------------- + # Setup argument parser + #--------------------------------------------------------------------------- + parser = argparse.ArgumentParser() + parser.add_argument("--atsOutput", type=str, required=True, + help="Path to atsr.py file produced from running performance.py") + args = parser.parse_args() + + atsFile = args.atsOutput + if (os.path.isdir(args.atsOutput)): + atsFile = os.path.join(args.atsOutput, "atsr.py") + if (not os.path.exists(atsFile)): + raise Exception(f"File {atsFile} does not exist") + # Run atsr.py and put values into globals + exec(compile(open(atsFile).read(), atsFile, 'exec'), globals()) + state = globals()["state"] + +if __name__=="__main__": + main() diff --git a/scripts/gitlab/run_ats.py b/scripts/gitlab/run_ats.py deleted file mode 100755 index 3b3e1ca8c..000000000 --- a/scripts/gitlab/run_ats.py +++ /dev/null @@ -1,108 +0,0 @@ -#!/usr/bin/env python3 - -import sys, subprocess, argparse, os - 
-sys.path.append(os.path.join(os.path.dirname(__file__), "..")) -from spheralutils import sexe - -# If the number of failed tests exceeds this value, ATS is not rerun -max_test_failures = 10 -# Number of times to rerun the ATS tests -max_reruns = 1 - -#------------------------------------------------------------------------------ - -def parse_args(): - parser = argparse.ArgumentParser() - - # Spec args - parser.add_argument('--test-alloc', type=str, nargs="+", - help='Allocation command for the machine.') - parser.add_argument('--ats-file', type=str, - help='ATS test file to run.') - parser.add_argument('--ci-build-dir', type=str, - help='CI build directory.') - parser.add_argument('--ci-install-dir', type=str, - default="build_gitlab/install", - help="Location of Spheral installation "+\ - "relative to --ci-build-dir") - return parser.parse_args() - -#------------------------------------------------------------------------------ - -# Run ats.py to check results and return the number of failed tests -def report_results(output_dir): - ats_py = os.path.join(output_dir, "atsr.py") - if (not os.path.exists(ats_py)): - print(f"{ats_py} does not exists") - sys.exit(1) - exec(compile(open(ats_py).read(), ats_py, 'exec'), globals()) - state = globals()["state"] - failed_tests = [t for t in state['testlist'] if t['status'] in [FAILED,TIMEDOUT] ] - if len(failed_tests) > 0: - print(f"ATS failed {len(failed_tests)} tests.") - for t in failed_tests: - print(t['name']) - return len(failed_tests) - else: - print("ATS passed all tests.") - return 0 - -#------------------------------------------------------------------------------ - -# Run the tests and check if any failed -def run_and_report(run_command, ci_output, num_runs): - if (num_runs > max_reruns): - print("Exceeded number of ATS reruns") - sys.exit(1) - sexe(run_command) - tests_passed = report_results(ci_output) - if (tests_passed == 0): - if (num_runs > 0): - print("WARNING: Some tests were run multiple times") - 
sys.exit(0) - # This should be added back in once Jacamar can handle exit codes properly - # if (num_runs == 0): - # sys.exit(0) - # else: - # sys.exit(80) - elif (tests_passed >= max_test_failures): - print("Too many test failures, not rerunning ATS") - sys.exit(1) - else: - rerun_command = run_command - if (num_runs == 0): - ats_cont_file = os.path.join(ci_output, "continue.ats") - if (not os.path.exists(ats_cont_file)): - print(f"{ats_cont_file} not found, ATS cannot be rerun") - sys.exit(1) - rerun_command = f"{run_command} {ats_cont_file}" - print("WARNING: Test failure, rerunning ATS") - run_and_report(rerun_command, ci_output, num_runs + 1) - -#------------------------------------------------------------------------------ - -def run_ats_test(args): - build_gl_dir = os.path.join(args.ci_build_dir, args.ci_install_dir) - ats_file = os.path.join(build_gl_dir, args.ats_file) - if (not os.path.exists(ats_file)): - print(f"{ats_file} does not exists") - sys.exit(1) - lcats_test = os.path.join(build_gl_dir, "spheral-lcatstest") - if (not os.path.exists(lcats_test)): - print(f"{lcats_test} does not exists") - ats_configs = ' --timelimit="45m"' - test_alloc = " ".join(args.test_alloc) - run_command = f"{test_alloc} {lcats_test} --logs test-logs {ats_file} {ats_configs}" - print(f"Running {run_command}") - ci_output = os.path.join(args.ci_build_dir, "test-logs") - run_and_report(run_command, ci_output, 0) - -#------------------------------------------------------------------------------ - -def main(): - args = parse_args() - run_ats_test(args) - -if __name__ == "__main__": - main() diff --git a/scripts/lc/lcats b/scripts/lc/lcats index 4159f99e2..70a7cb984 100755 --- a/scripts/lc/lcats +++ b/scripts/lc/lcats @@ -6,189 +6,101 @@ import platform import sys import argparse, re import subprocess +import copy d_debug= 0 SYS_TYPE = os.environ.get('SYS_TYPE','') # This is better than platform.node() some of the time, because it differentiates between jade, jadeita, and 
jadedev. -LCSCHEDCLUSTER = os.environ.get('LCSCHEDCLUSTER',platform.node()) - -def cpu_count(): - """Reliably return the number of physical cores. - multiprocessing.cpu_count() and psutil.cpu_count() do not do this correctly. - Instead parse the output of 'lscpu'. - """ - if 'rzwhippet' in platform.node(): - return 56 - elif 'rzgenie' in platform.node() or 'ruby' in platform.node(): - return 36 - else: - try: - p = subprocess.run(["lscpu",], stdout=subprocess.PIPE, text=True) - except Exception as e: - print ("Error running lscpu to get cpu count\n") - sys.exit(1) - - out = p.stdout - lines = out.split('\n') - - for line in lines: - key, value = line.split(":") - if key == "Core(s) per socket": - cores_per_socket = int(value) - if key == "Socket(s)": - sockets = int(value) - break - - return (cores_per_socket * sockets) +LCSCHEDCLUSTER = os.environ.get('LCSCHEDCLUSTER','') +#------------------------------------------------------------------------ -#--------------------------------------------------------------------------- -def createBsubFile(inCommand, inAllOptions): - inFilename= inAllOptions.msubFilename - - FILE= open(inFilename, "w") - - bsubOutputFilename= inFilename + ".out" - - import platform - thisNode= platform.node() - - #BSUB -G guests - #BSUB -o jobRetry.output # output is sent to file job.output - #BSUB -J nightlyBlueosBuild # name of the job - #BSUB -W 240 # alloc time - was defaulting to 30 minutes - FILE.write("#BSUB -G %s \n" % machineSettings.group) - FILE.write("#BSUB -o " + bsubOutputFilename + "\n") - FILE.write("#BSUB -J " + inFilename + "\n") - FILE.write("#BSUB -W %d \n" % machineSettings.allocTime ) - FILE.write("#BSUB -n " + str(machineSettings.numProcs) + "\n") - FILE.write("\n\n") - FILE.write("setenv MACHINE_TYPE " + machineSettings.machineType + '\n') - FILE.write("setenv SYS_TYPE " + SYS_TYPE + '\n') - - FILE.write(""+ '\n') - FILE.write("date"+ '\n') - FILE.write("cd " + os.getcwd() + "\n") - FILE.write(inCommand+ '\n') - - 
FILE.write("date"+ '\n') - FILE.close() - return inFilename - -#--------------------------------------------------------------------------- -def createMsubFile(inCommand, inAllOptions): - - inFilename= inAllOptions.msubFilename - - FILE= open(inFilename, "w") - - msubOutputFilename= inFilename + ".out" - - import platform - thisNode= platform.node() - - FILE.write("#!/bin/tcsh" + '\n') - FILE.write("\n") - FILE.write("#MSUB -N " + inFilename + '\n') - FILE.write("#MSUB -j oe "+ '\n') # directs all err output to stdout ") - FILE.write("#MSUB -o " + msubOutputFilename + '\n') - FILE.write("#MSUB -l nodes=" + str(machineSettings.numNodes)+ ":ppn=" + str(cpu_count()) + '\n') - FILE.write("#MSUB -l walltime=%d:00\n" % machineSettings.allocTime ) -# FILE.write("#MSUB -V # exports all environment var "+ '\n') - - if machineSettings.name != 'cray': - FILE.write("#MSUB -q " + machineSettings.partition + '\n') - FILE.write("#MSUB -l gres=ignore "+ '\n') - FILE.write("#MSUB -A " + machineSettings.bank + " #bank to use "+ '\n') - - FILE.write(""+ '\n') - - # LLNL specific - if machineSettings.name == 'cray': - FILE.write("source " + "/usr/projects/kull/developers/tools/kull_cshrc.csh " + '\n') - - # rzmerl and zin specific - increase limits to avoid pthread_create errors. - if machineSettings.name == 'chaos5BatchCapable': - FILE.write('limit maxproc 7168'+'\n') - FILE.write('limit descriptors 7168'+'\n') - - FILE.write("setenv MACHINE_TYPE " + machineSettings.machineType + '\n') - FILE.write("setenv SYS_TYPE " + SYS_TYPE + '\n') - - FILE.write(""+ '\n') - FILE.write("date"+ '\n') - FILE.write("cd " + os.getcwd() + "\n") - FILE.write(inCommand+ '\n') - - FILE.write("date"+ '\n') - - - FILE.close() - return inFilename - -#--------------------------------------------------------------------------- -def createSbatchFile(inCommand, inAllOptions): - ''' Some clusters don't have msub wrappers so we use sbatch directly. 
- Options are still the same as those used for the msub just the submission is different - so reusing most of the same variables here. - ''' - inFilename= inAllOptions.msubFilename - - FILE= open(inFilename, "w") - - sbatchOutputFilename= inFilename + ".out" - sbatchErrorFilename= inFilename + ".error" - - import platform - thisNode= platform.node() - - FILE.write("#!/bin/tcsh" + '\n') - FILE.write("\n") - FILE.write("#SBATCH --job-name=" + inFilename + '\n') - FILE.write("#SBATCH --error="+ sbatchErrorFilename + '\n') # directs all err output to stdout ") - FILE.write("#SBATCH --output="+ sbatchOutputFilename + '\n') # directs all other output to stdout ") - FILE.write("#SBATCH --nodes=" + str(machineSettings.numNodes)+ "\n") - FILE.write("#SBATCH --ntasks=" + str(cpu_count()) +"\n") # Is this OKay? Not sure if we want to default ntasks. - FILE.write("#SBATCH --time=%d\n" % machineSettings.allocTime ) - - if machineSettings.name != 'cray': - FILE.write("#SBATCH --partition=" + machineSettings.partition + '\n') - FILE.write("#SBATCH --account=" + machineSettings.bank + " #bank to use "+ '\n') - - FILE.write(""+ '\n') - - # LLNL specific - FILE.write("setenv MACHINE_TYPE " + machineSettings.machineType + '\n') - FILE.write("setenv SYS_TYPE " + SYS_TYPE + '\n') - - FILE.write(""+ '\n') - FILE.write("date"+ '\n') - FILE.write("cd " + os.getcwd() + "\n") - FILE.write(inCommand+ '\n') +class MachineInfo: + def __init__ (self, **options): + self.name = '' + self.allocTime = 120 + self.timeCmd = '--time' + self.timeLimit = 120 + self.machineType = '' + self.numNodes = 4 + self.procsPerNode = None + self.allocCmd = None + self.nodeCmd = '' + self.gpusPerNode = 0 + self.group = '' + self.groupCmd = '' + self.partition = 'pbatch' + self.partitionCmd = '-p' - FILE.write("date"+ '\n') + self.bank = '' + self.defaultAtsArgs = [ + "--continueFreq=15", + "--timelimit=120"] + self.atsArgs = [] + self.envArgs = [] + self.__dict__.update(options) + def get_ats_args(self): + 
args_list = " ".join(str(x) for x in self.defaultAtsArgs + self.atsArgs) + args_list += f" --numNodes {self.numNodes} --allInteractive" + return args_list - FILE.close() - return inFilename + def get_launch_cmd(self): + launch_cmd = f"{self.allocCmd} {self.nodeCmd} {self.numNodes} {self.timeCmd} {self.allocTime}" + if (self.group): + launch_cmd += f" {self.groupCmd} {self.group}" + if (self.partition): + launch_cmd += f" {self.partitionCmd} {self.partition}" + return launch_cmd -#------------------------------------------------------------------------ - -class MachineInfo: + def get_num_procs(self): + return self.numNodes * self.procsPerNode +class blueOS(MachineInfo): def __init__ (self, **options): "Must not throw an exception -- object must always get created." super(MachineInfo, self).__init__() - self.name = '' - self.allocTime = '' - self.machineType = '' - self.batch = True + self.name = '', + self.allocTime = 240 + self.timeCmd = '-W' + self.machineType = 'blueos_3_ppc64le_ib_p9' self.numNodes = 4 - self.numProcs = None + self.procsPerNode = 40 + self.gpusPerNode = 4 + self.allocCmd = '/usr/tcetmp/bin/lalloc' + self.nodeCmd = "" + self.group = 'guests' + self.groupCmd = '-G' + self.bank = 'guests' + self.partition = 'pdebug' + self.partitionCmd = '-q' + self.atsArgs = ["--smpi_off", + "--npMax=36", + "--glue='noDraco=True'", + "--glue='noVisit=True'", + "--glue='noOpacityServer=True'", + "--glue='noCxxUnitTesting=True'"] + self.envArgs = [] + self.__dict__.update(options) + +class toss4(MachineInfo): + def __init__ (self, **options): + super(MachineInfo, self).__init__() + self.name = '', + self.allocTime = 180 + self.machineType = 'slurm36' + self.numNodes = 2 + self.procsPerNode = 36 + self.gpusPerNode = 0 + self.allocCmd = 'salloc --exclusive' + self.nodeCmd = "-N" self.group = '' - self.partition = 'pbatch' - self.atsArgs = [] + self.bank = 'wbronze' + self.partition = 'pdebug' + self.partitionCmd = '-p' + self.atsArgs = ["--npMax=40"] + self.envArgs = 
[] self.__dict__.update(options) #--------------------------------------------------------------------------- @@ -196,432 +108,94 @@ class MachineInfo: #--------------------------------------------------------------------------- #--------------------------------------------------------------------------- -# Setup argument parser -#--------------------------------------------------------------------------- -parser = argparse.ArgumentParser() - -#--------------------------------------------------------------------------- -useCpu= cpu_count() -#--------------------------------------------------------------------------- - -blueosSettings= MachineInfo( - name='blueos', - machineType='blueos_3_ppc64le_ib_p9', - batch= False, - allocTime = 240, - group = "guests", - partition='pdebug', - numProcs = 128, - numNodes=4, - bank='guests', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=40", - "--continueFreq=15", - "--timelimit=120", - "--glue='noDraco=True'", - "--glue='noVisit=True'", - "--glue='noOpacityServer=True'", - "--glue='noCxxUnitTesting=True'", - ], -) - -rzmantaSettings= MachineInfo( - name='blueos', - machineType='rzmanta', - batch= True, - allocTime = 240, - group = "guests", - partition='pdebug', - numProcs = 128, - #numNodes=, - #bank='science', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=8", - "--continueFreq=15", - "--timelimit=480", - "--glue='noDraco=True'", - "--glue='noVisit=True'", - "--glue='noOpacityServer=True'", - "--glue='noCxxUnitTesting=True'", - ], -) - -bgqSettings= MachineInfo( - name='bgq', - machineType='rzuseq', - batch= False, - allocTime = 480, - - partition='pdebug', - numNodes=64, - bank='science', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=8", - "--continueFreq=15", - "--timelimit=480", - "--glue='noDraco=True'", - "--glue='noVisit=True'", - "--glue='noOpacityServer=True'", - "--glue='noCxxUnitTesting=True'", - ], - -) - -chaos5BatchCapable = 
MachineInfo( - name='chaos5BatchCapable', - machineType='SlurmProcessorScheduled', - batch = True, - partition='pbatch', - numNodes=8, - bank='wbronze', - allocTime = 180, - atsArgs=[ "--allInteractive", - "--glue='independent=True'", - "--continueFreq=15", - "--timelimit=60", - "--npMax=%s" % cpu_count() - ], - -) - -chaos5NotBatchCapable= MachineInfo( - name='chaos5NotBatchCapable', - machineType='SlurmProcessorScheduled', - batch=False, - numNodes=4, - partition='pdebug', - bank='wbronze', - allocTime = 180, - atsArgs=[ "--allInteractive", - "--continueFreq=15", - "--timelimit=60", - "--glue='independent=True'" - ], - -) - -craySettings= MachineInfo( - name='cray', - machineType='cray', - atsArgs=[ - '--allInteractive', - "--timelimit=60", - "--glue='independent=True'", - ], - -) - -chamaSettings= MachineInfo( - name='chama', - machineType='SlurmProcessorScheduled', - batch = False, - partition='nw', - numNodes=8, - bank='FY140244', # TAG's WC_IC - wcid='FY140244', # TAG's WC_IC - allocTime = 240, - atsArgs=[ "--allInteractive", - "--glue='independent=True'", - "--glue='noDraco=True'", - "--glue='noOverlink=True'", - "--glue='noOpacityServer=True'", - "--glue='noTracker=True'", - "--timelimit=60", - "--npMax=%s" % cpu_count() - ], - -) - -glorySettings= MachineInfo( - name='glory', - machineType='SlurmProcessorScheduled', - batch = False, - partition='nw', - numNodes=8, - bank='FY140244', # TAG's WC_IC - wcid='FY140244', # TAG's WC_IC - allocTime = 240, - atsArgs=[ "--allInteractive", - "--glue='independent=True'", - # "--glue='noDraco=True'", - "--glue='noOverlink=True'", - "--glue='noOpacityServer=True'", - "--glue='noTracker=True'", - "--timelimit=60", - "--npMax=%s" % cpu_count() - ], - -) - -toss3Settings= MachineInfo( - name='rzgenie', - machineType='SlurmProcessorScheduled', - batch= False, - #allocTime = 240, - allocTime = 180, - partition='pdebug', - #numNodes=4, - numNodes=2, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - 
'--allInteractive', - "--npMax=%s"%(useCpu), - "--continueFreq=15", - "--timelimit=120", - ], - -) - -toss3Batch= MachineInfo( - name='rztopaz', - machineType='SlurmProcessorScheduled', - batch= True, - allocTime = 360, - partition='pbatch', - numNodes=4, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=36", - "--continueFreq=15", - "--timelimit=120", - ], - -) - -rztopazSettings= MachineInfo( - name='rztopaz', - machineType='SlurmProcessorScheduled', - batch= False, - allocTime = 60, - partition='pdebug', - numNodes=2, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=36", - "--continueFreq=15", - "--timelimit=60", - ], -) - -toss4Settings= MachineInfo( - name='toss4machine', - machineType='slurm36', - batch= False, - allocTime = 180, - partition='pdebug', - numNodes=2, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=36", - "--continueFreq=15", - "--timelimit 120m", - ], -) - -toss4BatchSettings= MachineInfo( - name='toss4BatchMachine', - machineType='slurm36', - batch= True, - allocTime = 180, - partition='pdebug', - numNodes=2, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=36", - "--continueFreq=15", - "--timelimit 120m", - ], -) - -# Ruby settings (same as TOSS4 interactive without pdebug) -rubySettings= MachineInfo( - name='toss4machine', - machineType='slurm36', - batch= False, - allocTime = 180, - numNodes=2, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=36", - "--continueFreq=15", - "--timelimit 120m", - ], -) - -# settings when rzwhippet is running flux natively -rzwhippetSettings= MachineInfo( - name='rzwhippet', - machineType='flux00', - batch= False, - allocTime = 180, - numNodes=2, - bank='wbronze', - atsArgs=[ - "--glue='independent=True'", - '--allInteractive', - "--npMax=%s"%(useCpu), - "--continueFreq=15", - 
"--timelimit=120", - ], -) - -# Determine machine settings to use +# Setup machine info classes #----------------------------------------------------------------------- -# Determine machine settings to use -# NOTE: -# The options are obtained from argv, the defaults for the options are from the machineSettings -# options are used later to figure what to do.... machine settings are used for non-init options -# -#----------------------------------------------------------------------- - -if platform.processor() == 'ppc64': - machineSettings = bgqSettings - -elif 'PRGENVMODULES' in os.environ: # cray machine - machineSettings = craySettings - -elif 'chama' in SYS_TYPE: - machineSettings = chamaSettings - -elif 'glory' in SYS_TYPE: - machineSettings = glorySettings - -elif 'blue' in SYS_TYPE: - machineSettings = blueosSettings - -elif 'toss_3' in SYS_TYPE: - if 'rzgenie' in LCSCHEDCLUSTER or 'jadedev' == LCSCHEDCLUSTER or 'zindev' == LCSCHEDCLUSTER: - # Developer machines are interactive - machineSettings = toss3Settings - elif '--partition=pdebug' in sys.argv and not '--batch' in sys.argv: - # Need short queue settings - machineSettings = rztopazSettings - else: - # Put it in batch. - machineSettings = toss3Batch -elif 'toss_4' in SYS_TYPE: - if 'ruby' in LCSCHEDCLUSTER: - machineSettings = rubySettings - else: - machineSettings = toss4Settings -else: - print("Could not determine machine settings to use.") - sys.exit(1) -print("Selected machine settings for: ", machineSettings.name) +lassenSettings = blueOS(name="lassen") +rzanselSettings = blueOS(name="rzansel") -#---------------------------------------------------------- -# inits and defaults -#---------------------------------------------------------- -import random -import time -ezatsLocaltime = time.localtime() -ezatsStartTime = time.strftime("%y%m%d%H%M%S",ezatsLocaltime) -msubFilenameDefault= "tmpAts." + ezatsStartTime + ".job" -bsubFilenameDefault= "tmpAts." 
+ ezatsStartTime + ".job" +rubySettings = toss4(name="ruby", partition="") +rzgenieSettings = toss4(name="rzgenie") +rzwhippetSettings = toss4(name="rzwhippet", procsPerNode=112) +allSettings = [lassenSettings, rzanselSettings, rubySettings, rzgenieSettings] +macNames = {x.name: x for x in allSettings} #--------------------------------------------------------------------------- -# options affecting machine settings +# Setup argument parser #--------------------------------------------------------------------------- +parser = argparse.ArgumentParser() parser.add_argument( "--allocTime", type=int, metavar="minutes", dest="allocTime", help = "The amount of time for the batch job (in minutes) .") -parser.add_argument( "--interactive", action="store_true", dest="interactive", - help = "Run ats interactively in SLURM (default is false if batch system detected.)") - -parser.add_argument( "--machineType", type=str, metavar="MACHINE_TYPE", dest="machineType", - help="Sets the MACHINE_TYPE for ats.") +parser.add_argument( "--machine", type=str, default=None, choices=list(macNames.keys()), + help="Sets the machine for ats.") -parser.add_argument( "--numNodes", type=int, metavar="number of nodes", dest="numNodes", +parser.add_argument( "--numNodes", type=int, help="Number of nodes to allocate for ats to run in.") -parser.add_argument( "--partition", type=str, metavar="scheduler partition", dest="partition", +parser.add_argument( "--partition", type=str, help = "Partition in which to run jobs.") -parser.add_argument( "--bank", type=str, metavar="account to charge",dest="bank", +parser.add_argument( "--bank", type=str, help = "Bank to use for batch job.") -parser.add_argument("--wcid", type=str, metavar="WC-ID to assign", dest='wcid', - #default = machineSettings.bank, +parser.add_argument("--wcid", type=str, help = "HERT WC-ID to use for batch job.") -parser.add_argument( "--nogpu", action="store_true", dest="nogpu", - help = "For blueos. Filters out gpu test. 
Used in conjunction with threaded option.") - -parser.add_argument( "--gpuonly", action="store_true", dest="gpuonly", - help = "For blueos nvcc runs. Filters for gpu tests. Used in conjunction with threaded option.") - parser.add_argument( "--sanitize", action="store_true", dest="sanitize", help = "Run sanitize tests. NOTE These need a specific build to work. ") - #--------------------------------------------------------------------------- # other options #--------------------------------------------------------------------------- -parser.add_argument( "--msubFilename", type=str, metavar="msub file name", dest='msubFilename', +ezatsLocaltime = time.localtime() +ezatsStartTime = time.strftime("%y%m%d%H%M%S",ezatsLocaltime) +msubFilenameDefault= "tmpAts." + ezatsStartTime + ".job" +bsubFilenameDefault= "tmpAts." + ezatsStartTime + ".job" +parser.add_argument( "--msubFilename", type=str, default = msubFilenameDefault, help = "The name of the generated ats msub job script that will be run.") -parser.add_argument( "--bsubFilename", type=str, metavar="msub file name", dest='bsubFilename', +parser.add_argument( "--bsubFilename", type=str, default = bsubFilenameDefault, help = "The name of the generated ats bsub job script that will be run.") parser.add_argument( '--timelimit', dest='timelimit', default=30, help='Set the default time limit on each test. The value may be given as a digit followed by an s, m, or h to give the time in seconds, minutes (the default), or hours.') # The P2 version is a sym-link to the latest python 2 version of ATS. 
There's a P3 when we're ready for Python3 -parser.add_argument( "--atsExe", type=str, dest="atsExe", default="/usr/apps/ats/7.0.P3/bin/ats", help="Sets which ats to use.") - -parser.add_argument( "--skip", action='store_true', dest='skip', default = False, - help='skip actual execution of the tests, but show filtering results and missing test files.') +parser.add_argument( "--atsExe", type=str, default="/usr/apps/ats/7.0.P3/bin/ats", help="Sets which ats to use.") -parser.add_argument( "--testpath", type=str, dest="testpath", default="", +parser.add_argument( "--testpath", type=str, default="", help="Specifies a path for ezats to use for unique test output.") -parser.add_argument( "--debug-build", action="store_true", dest="debugbuild", default=False, - help="assume we are testing a debug build and should skip expensive (level>=100) tests.") - # Pass through options parser.add_argument("passthrough", nargs="*", help="Anything beyond a blank -- is passed through to the ats call") options = parser.parse_args() -# If running in SLURM, use defaults of less nodes and pdebug partition -if options.interactive: - machineSettings.batch = False - machineSettings.numNodes = 4 +#--------------------------------------------------------------------------- +# Determine machine settings to use +#----------------------------------------------------------------------- - if "muir" in platform.node(): - machineSettings.partition = 'views' - else: - machineSettings.partition = 'pdebug' +if options.machine: + machineSettings = macNames[options.machine] +elif LCSCHEDCLUSTER in macNames: + machineSettings = macNames[LCSCHEDCLUSTER] +else: + print("Could not determine machine settings to use.") + sys.exit(1) - machineSettings.allocTime = 60 +print("Selected machine settings for: ", machineSettings.name) + +#--------------------------------------------------------------------------- +# options affecting machine settings 
+#--------------------------------------------------------------------------- if options.allocTime: machineSettings.allocTime = options.allocTime -if options.machineType: - machineSettings.machineType = options.machineType - if options.numNodes: machineSettings.numNodes = options.numNodes @@ -634,30 +208,8 @@ if options.bank: if options.wcid: machineSettings.wcid = options.wcid -if (d_debug==1): - print("options= ", options) - atsArgs = " ".join(str(x) for x in options.passthrough) -if "--help" in atsArgs or "-h" in atsArgs or "-help" in atsArgs: - print("------------------------------------------------------------------") - print("Options available for only ezats: ") - print("------------------------------------------------------------------") - parser.print_help() - print("------------------------------------------------------------------") - print("Options for ats: ") - print("------------------------------------------------------------------") - from subprocess import check_call - check_call([options.atsExe, "-h"]) - - print("\n\n------------------------------------------------------------------") - print("ezats sets these ATS options: ") - print("------------------------------------------------------------------") - print('\n'.join(machineSettings.atsArgs)) - print('\n\n') - - sys.exit(0) - print("Note: the srun message 'error: ioctl(TIOCGWINSZ)' can be ignored. 
\n[It means the process is trying to do something that requires a tty \nbut it's not doing either a read or write.]\n") toAdd = "" @@ -668,158 +220,21 @@ if (options.testpath): if options.sanitize: toAdd += """ --filter="sanitize==1" """ -if options.debugbuild: - toAdd += """ --filter="level<100" """ - -toAdd += " ".join(x for x in machineSettings.atsArgs if x not in atsArgs) -if(machineSettings.machineType != 'SlurmProcessorScheduled' and - machineSettings.machineType != 'blueos_3_ppc64le_ib_p9'): - try: - toAdd += f" --numNodes {machineSettings.numNodes}" - except: - pass - try: - toAdd += f" --wcid {machineSettings.wcid}" - except: - pass - -finalCommandToRun = f"{options.atsExe} {toAdd} {atsArgs}" - -if machineSettings.batch: - listCommandsToRemove.append('--batchHost') - listCommandsToRemove.append('--batchT') - listCommandsToRemove.append('--batchP') - listCommandsToRemove.append('--batch ') - listCommandsToRemove.append('--partition') - -for machineArg in machineSettings.atsArgs: - if 'REMOVE' in machineArg: - listCommandsToRemove.append(machineArg) +#toAdd += " ".join(x for x in machineSettings.atsArgs if x not in atsArgs) -# Remove all extra spaces -finalCommandToRun = re.sub(r"\s+", " ", finalCommandToRun.strip()) +AtsRunCmd = f"{options.atsExe} {toAdd} {atsArgs} {machineSettings.get_ats_args()}" -#---------------------------------------------------------- -# if MSUB, SBATCH or BSUB -#---------------------------------------------------------- -if machineSettings.batch: - print("--- ATS COMMAND ---\n", finalCommandToRun) - if "blueos" in SYS_TYPE: - bsubFilename= createBsubFile(finalCommandToRun, options) - batchtype = 'bsub < ' # have to have an input file redirect for bsub - print("\nWritten to %s batch filename: %s " %(batchtype, bsubFilename)) - cmd = batchtype + ' ' + bsubFilename - elif 'magma' in LCSCHEDCLUSTER: - sbatchFilename= createSbatchFile(finalCommandToRun, options) - batchtype = 'sbatch' - print("\nWritten to %s batch filename: %s " 
%(batchtype, sbatchFilename)) - cmd = batchtype + ' ' + sbatchFilename - elif 'mica' in LCSCHEDCLUSTER: - sbatchFilename= createSbatchFile(finalCommandToRun, options) - batchtype = 'sbatch' - print("\nWritten to %s batch filename: %s " %(batchtype, sbatchFilename)) - cmd = batchtype + ' ' + sbatchFilename - else: - msubFilename= createMsubFile(finalCommandToRun, options) - batchtype = 'msub' - print("\nWritten to %s batch filename: %s " %(batchtype, msubFilename)) - cmd = batchtype + ' ' + msubFilename - - if not options.skip: - from subprocess import check_call - print("Running0: ", cmd) - #check_call( cmd.split() ) - os.system( cmd ) - else: - if 'msub' in batchtype: - print("SKIP option in ats command. ' msub ", msubFilename, "' was not executed.") - elif 'sbatch' in batchtype: - print("SKIP option in ats command. ' sbatch ", sbatchFilename, "' was not executed.") - else: - print("SKIP option in ats command. ' bsub ", bsubFilename, "' was not executed.") - - - sys.exit() - -#---------------------------------------------------------- -# else SALLOC or threaded w/out salloc -#---------------------------------------------------------- -else: +os.environ["MACHINE_TYPE"] = machineSettings.machineType +os.environ["BATCH_TYPE"] = "None" - os.environ["MACHINE_TYPE"] = machineSettings.machineType - if machineSettings.name in ['rzwhippet_flux']: - os.environ["MACHINE_TYPE"] = "flux00" - os.environ["BATCH_TYPE"] = "None" - - if platform.processor() == 'ppc64': - numProcsLine = "" - else: - numProcsLine = " -n %d" % ( machineSettings.numNodes* cpu_count() ) - - if machineSettings.allocTime: - if machineSettings.name in ['rzwhippet_flux']: - allocTime = "-t %dm" % machineSettings.allocTime - else: - allocTime = "--time=%d:00" % machineSettings.allocTime - else: - allocTime = "" - - HERT_WC_ID = '' - if machineSettings.name in ['chama', 'glory']: - HERT_WC_ID = ' --account=' + machineSettings.wcid - - if machineSettings.name in ['rzwhippet_flux']: - finalCommandToRun= "flux 
alloc --exclusive " \ - + " " + allocTime \ - + HERT_WC_ID \ - + options.extraEzatsArgs \ - + " -N " + str(machineSettings.numNodes) \ - + numProcsLine + " " \ - + finalCommandToRun - # + " -p " + machineSettings.partition + " " - # Threaded tests under ats should NOT use salloc - elif 'blue' not in os.environ['SYS_TYPE']: - finalCommandToRun= "salloc --exclusive " \ - + " " + allocTime \ - + HERT_WC_ID \ - + options.extraEzatsArgs \ - + " -N " + str(machineSettings.numNodes) \ - + numProcsLine \ - + " -p " + machineSettings.partition + " " \ - + finalCommandToRun - else: - finalCommandToRun += " --numNodes="+ str(machineSettings.numNodes) - #sys.exit() - -if (d_debug==1): - print("finalCommandToRun after= ", finalCommandToRun) - -#---------------------------------------------------------- -# Find filter part and keep whole -# -# [05/30] The problem is from splitting the command into argsToUse. If we ran w/ something other than os.execv, maybe this will work correctly. -tagFilter= finalCommandToRun.find('--filter') -comboMark= False -for anArg in sys.argv[1:]: - if '--filter' in anArg and ("'" in anArg): - comboMark= True -if tagFilter != -1 and comboMark==True: - startFilter= finalCommandToRun.find("'", tagFilter) - endFilter= finalCommandToRun.find("'", startFilter+1) - filterPart= finalCommandToRun[tagFilter:endFilter+1] - filterPart= filterPart.replace("'", '') - argsToUse= finalCommandToRun[0:tagFilter].split() + [filterPart] + finalCommandToRun[endFilter+1:].split() -else: - argsToUse= finalCommandToRun.split() +finalCommandToRun = machineSettings.get_launch_cmd() + " " + AtsRunCmd +# Remove all extra spaces +finalCommandToRun = re.sub(r"\s+", " ", finalCommandToRun.strip()) -print("Running:\n ", finalCommandToRun) -if (d_debug==1): - print("atsExe= ", options.atsExe) - print("atsArgs= ", argsToUse) +print(f"Running command:\n {finalCommandToRun}") from subprocess import check_call try: check_call( finalCommandToRun,shell=True ) except Exception as e: 
print("Caught - non-zero exit status 3 - thrown by final command", e) - print("Tests appear to execute correctly...but this output is here to keep an eye on this.") diff --git a/scripts/lcatstest.in b/scripts/lcatstest.in deleted file mode 100644 index 58ece000f..000000000 --- a/scripts/lcatstest.in +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env bash - -@CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/lcats --atsExe @CMAKE_INSTALL_PREFIX@/.venv/bin/ats -- -e @CMAKE_INSTALL_PREFIX@/spheral @SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@ "$@" diff --git a/scripts/performance/CMakeLists.txt b/scripts/performance/CMakeLists.txt deleted file mode 100644 index 31baf5573..000000000 --- a/scripts/performance/CMakeLists.txt +++ /dev/null @@ -1,10 +0,0 @@ - -configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/performance.py.in" - "${CMAKE_CURRENT_BINARY_DIR}/performance.py" -) - -install(FILES - "${CMAKE_CURRENT_BINARY_DIR}/performance.py" - DESTINATION "${CMAKE_INSTALL_PREFIX}/tests" -) diff --git a/scripts/performance/performance.py.in b/scripts/performance/performance.py.in deleted file mode 100644 index 4e267e319..000000000 --- a/scripts/performance/performance.py.in +++ /dev/null @@ -1,111 +0,0 @@ -#!/user/bin/env python3 - -# This file runs and compares performance tests through the ats system. 
-# Run using: ./spheral-lcats tests/performance.py - -import sys, shutil, os, time -import numpy as np - -caliper_loc = "@CONFIG_CALIPER_DIR@" -sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) -import caliperreader as cr - -# Location of benchmark data -benchmark_dir = "/usr/gapps/Spheral/benchmarks" -# Current system architecture from Spack -spheral_sys_arch = "@SPHERAL_SYS_ARCH@" -# Current install configuration from Spack -spheral_install_config = "@SPHERAL_CONFIGURATION@" -# Manually change this if wanting to add performance -# data to benchmark directory -ci_run = False - -# Function called on exit to do timing comparisons -def compare_times(manager): - for test in manager.testlist: - run_dir = test.directory - cali_file = test.options["caliper_filename"] - cfile = os.path.join(run_dir, test.options["caliper_filename"]) - ref_regions = test.options["regions"] - ref_timers = test.options["timers"] - r = cr.CaliperReader() - r.read(cfile) - records = r.records - gls = r.globals - if (ci_run): - ref_caliper_dir = test.options["ref_cali_dir"] - if (not os.path.exists(ref_caliper_dir)): - os.makedirs(ref_caliper_dir) - new_cali_data = os.path.join(ref_caliper_dir, cfile) - shutil.copyfile(cfile, new_cali_data) - print("globals") - for i, j in gls.items(): - print(f"{i}: {j}") - # Extract current times - times = {} - for rec in records: - if ("region" in rec): - fname = rec["region"] - if (type(fname) is list): - fname = fname[-1] - if (fname in ref_regions): - if (fname in times): - for t in ref_timers: - times[fname][t] += float(rec[t]) - else: - new_dict = {} - for t in ref_timers: - new_dict.update({t: float(rec[t])}) - times.update({fname: new_dict}) - # print("timers") - # for i, j in times.items(): - # print(f"{i}") - # for k, v in j.items(): - # print(f"{k}: {v}") - -onExit(compare_times) -glue(keep=True) - -# NOH tests -group(name="NOH tests") -# General input for all Noh-cylindrical-2d.py tests -test_dir = 
"@SPHERAL_TEST_INSTALL_PREFIX@/tests/functional/Hydro/Noh" -test_file = "Noh-cylindrical-2d.py" -gen_noh_inp = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 "+\ - "--nPerh 2.01 --graphics False --clearDirectories True --doCompare False" - -# Test 1 -num_cores = 8 -ppc = 100 # Points per core -# If we are on RZGenie/Ruby -if ("broadwell" in spheral_sys_arch): - num_cores = 72 - ppc = 1000 -total_points = num_cores * ppc -nradial = 100 -test_name = "NC2D_1" -caliper_filename = f"{test_name}_{int(time.time())}.cali" -inps = f"{gen_noh_inp} --nRadial {nradial} --steps 10 --caliperFilename {caliper_filename}" -test_path = os.path.join(test_dir, test_file) -# Path to benchmark timing data -ref_cali_dir = os.path.join(benchmark_dir, spheral_install_config, test_name) -# Select which timing regions to post-process -regions = ["CheapRK2", - "CheapRK2PreInit", - "ConnectivityMap_computeConnectivity", - "ConnectivityMap_patch", - "CheapRK2EvalDerivs", - "CheapRK2EndStep"] -# Select which timers to use to post-process the regions above -timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks -spec_inps = f"{inps} --adiakData 'test_name: {test_name}'" -t = test(script=test_path, clas=spec_inps, label=f"{test_name}", np=num_cores, - caliper_filename=caliper_filename, - regions=regions, - timers=timers, - ref_cali_dir=ref_cali_dir) - -endgroup() - -# Add a wait to ensure all timer files are done -wait() diff --git a/scripts/spheral-setup-venv.in b/scripts/spheral-setup-venv.in index fafd20547..af82efa4b 100644 --- a/scripts/spheral-setup-venv.in +++ b/scripts/spheral-setup-venv.in @@ -15,11 +15,9 @@ sed -i 's|XXXXXX|\x27\x27\x27exec\x27 @CMAKE_INSTALL_PREFIX@/.venv/bin/python "$ echo "Creating spheral symlink to spheral-env script ..." 
cd @CMAKE_INSTALL_PREFIX@ chmod u+x scripts/spheral-env.sh -chmod u+x scripts/atstest.sh -chmod u+x scripts/lcatstest.sh +chmod u+x scripts/spheral_ats.py cp --symbolic-link scripts/spheral-env.sh spheral &> /dev/null -cp --symbolic-link scripts/atstest.sh spheral-atstest &> /dev/null -cp --symbolic-link scripts/lcatstest.sh spheral-lcatstest &> /dev/null +cp --symbolic-link scripts/spheral_ats.py spheral-ats &> /dev/null cd - > /dev/null echo "Byte-compiling packages in install path ..." diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in new file mode 100644 index 000000000..b87fe5f68 --- /dev/null +++ b/scripts/spheral_ats.py.in @@ -0,0 +1,158 @@ +#!/usr/bin/env python3 + +import os, time, sys +import argparse + +# This is a wrapper for running Spheral through ATS + +# These are set by CMake +spheral_sys_arch = "@SPHERAL_SYS_ARCH@" +spheral_install_config = "@SPHERAL_CONFIGURATION@" + +# Find spheralutils.py +install_prefix = "@CMAKE_INSTALL_PREFIX@" +sys.path.append(os.path.join(install_prefix, "scripts")) +from spheralutils import sexe + +# Apply filters set during install +install_filters = "@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@" + +# Options for running CI +# If the number of failed tests exceeds this value, ATS is not rerun +max_test_failures = 10 +# Number of times to rerun the ATS tests +max_reruns = 1 +test_log_name = "test-logs" + +ats_exe = os.path.join(install_prefix, ".venv/bin/ats") +spheral_exe = os.path.join(install_prefix, "spheral") + +#------------------------------------------------------------------------------ + +# Run ats.py to check results and return the number of failed tests +def report_results(output_dir): + ats_py = os.path.join(output_dir, "atsr.py") + if (not os.path.exists(ats_py)): + raise Exception(f"{ats_py} does not exists") + exec(compile(open(ats_py).read(), ats_py, 'exec'), globals()) + state = globals()["state"] + failed_tests = [t for t in state['testlist'] if t['status'] in [FAILED,TIMEDOUT] ] + if 
len(failed_tests) > 0: + print(f"ATS failed {len(failed_tests)} tests.") + for t in failed_tests: + print(t['name']) + return len(failed_tests) + else: + print("ATS passed all tests.") + return 0 + +#------------------------------------------------------------------------------ + +# Run the tests and check if any failed +def run_and_report(run_command, ci_output, num_runs): + if (num_runs > max_reruns): + raise Exception ("Exceeded number of ATS reruns") + try: + sexe(run_command) + except Exception as e: + print(e) + tests_passed = report_results(ci_output) + if (tests_passed == 0): + if (num_runs > 0): + print("WARNING: Some tests were run multiple times") + sys.exit(0) + # This should be added back in once Jacamar can handle exit codes properly + # if (num_runs == 0): + # sys.exit(0) + # else: + # sys.exit(80) + elif (tests_passed >= max_test_failures): + raise Exception("Too many test failures, not rerunning ATS") + else: + rerun_command = run_command + if (num_runs == 0): + ats_cont_file = os.path.join(ci_output, "continue.ats") + if (not os.path.exists(ats_cont_file)): + raise Exception(f"{ats_cont_file} not found, ATS cannot be rerun") + rerun_command = f"{run_command} {ats_cont_file}" + print("WARNING: Test failure, rerunning ATS") + run_and_report(rerun_command, ci_output, num_runs + 1) + +def main(): + #--------------------------------------------------------------------------- + # Setup argument parser + #--------------------------------------------------------------------------- + parser = argparse.ArgumentParser(allow_abbrev=False, + usage=""" + ./spheral-ats --numNodes 2 tests/integration.ats --filter="level<100" + """, + description=""" + Launches and runs Spheral using the ATS system. + Must provide an ATS file (either python or .ats). + Any unrecognized arguments are passed as inputs to the ATS file. 
+ """) + parser.add_argument("--numNodes", type=int, + default=None, + help="Number of nodes to allocate.") + parser.add_argument("--timeLimit", type=int, + default=None, + help="Time limit for allocation.") + parser.add_argument("--ciRun", action="store_true", + help="Pass if running the CI.") + parser.add_argument("--atsHelp", action="store_true", + help="Print the help output for ATS. Useful for seeing ATS options.") + options, unknown_options = parser.parse_known_args() + if (options.atsHelp): + sexe(f"{ats_exe} --help") + return + + #--------------------------------------------------------------------------- + # Setup machine info classes + #--------------------------------------------------------------------------- + mac_args = [] + ats_args = [install_filters, "--allInteractive"] + numNodes = options.numNodes + timeLimit = options.timeLimit + if ("broadwell" in spheral_sys_arch): + os.environ["MACHINE_TYPE"] = "slurm36" + numNodes = numNodes if numNodes else 2 + timeLimit = timeLimit if timeLimit else 120 + time_limit = 120 + mac_args = ["--npMax=36"] + launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " + elif ("power9" in spheral_sys_arch): + numNodes = numNodes if numNodes else 1 + timeLimit = timeLimit if timeLimit else 60 + mac_args = ["--smpi_off", "--npMax=40"] + launch_cmd = f"lalloc {numNodes} -W {timeLimit} " + if (options.ciRun): + launch_cmd += "-q pdebug -G guests " + mac_args.append("--lrun_pack") + else: + mac_args.append("--lrun") + ats_args.extend(mac_args) + + #--------------------------------------------------------------------------- + # Launch ATS + #--------------------------------------------------------------------------- + if (numNodes): + ats_args.append(f"--numNodes {numNodes}") + if (timeLimit): + ats_args.append(f"--timelimit {timeLimit}") + # If doing a CI run, set some more options + if (options.ciRun): + ats_args.append(f"--logs {test_log_name}") + ats_args = " ".join(str(x) for x in ats_args) + other_args = " 
".join(str(x) for x in unknown_options) + run_command = f"{launch_cmd}{ats_exe} -e {spheral_exe} {ats_args} {other_args}" + print(f"\nRunning: {run_command}\n") + if (not options.ciRun): + try: + sexe(run_command) + except Exception as e: + print(e) + else: + run_and_report(run_command, test_log_name, 0) + +if __name__ == "__main__": + main() diff --git a/src/PYB11/Utilities/Utilities_PYB11.py b/src/PYB11/Utilities/Utilities_PYB11.py index 6dc6cfc6a..7ed49a7d3 100644 --- a/src/PYB11/Utilities/Utilities_PYB11.py +++ b/src/PYB11/Utilities/Utilities_PYB11.py @@ -803,7 +803,22 @@ def clippedVolume(poly = "const Dim<3>::FacetedVolume&", ("long", "Long"), ("double", "Scalar"), ("std::string", "String")): - exec(""" -adiak_value%(label)s = PYB11TemplateFunction(adiak_value, "%(value)s", pyname="adiak_value") -adiak_value2%(label)s = PYB11TemplateFunction(adiak_value2, "%(value)s", pyname="adiak_value") -""" % {"label" : label, "value" : value}) + exec(f""" +adiak_value{label} = PYB11TemplateFunction(adiak_value, "{value}", pyname="adiak_value") +adiak_value2{label} = PYB11TemplateFunction(adiak_value2, "{value}", pyname="adiak_value") +""") +array_types = ["Scalar", + "Vector", + "Tensor", + "SymTensor", + "ThirdRankTensor", + "FourthRankTensor", + "FifthRankTensor"] +for ndim in dims: + for ctype in array_types: + value = f"Dim::<{ndim}>::{ctype}" + label = f"{ctype}{ndim}" + exec(f""" +adiak_value{label} = PYB11TemplateFunction(adiak_value, "{value}", pyname="adiak_value") +adiak_value2{label} = PYB11TemplateFunction(adiak_value2, "{value}", pyname="adiak_value") +""") diff --git a/src/SimulationControl/SpheralController.py b/src/SimulationControl/SpheralController.py index 9dca59646..22fc5d513 100644 --- a/src/SimulationControl/SpheralController.py +++ b/src/SimulationControl/SpheralController.py @@ -5,6 +5,7 @@ from SpheralCompiledPackages import * from SpheralTimer import SpheralTimer +from SpheralUtilities import adiak_value from SpheralConservation import 
SpheralConservation from GzipFileIO import GzipFileIO from SpheralTestUtilities import globalFrame @@ -52,6 +53,7 @@ def __init__(self, integrator, volumeType = RKVolumeType.RKVoronoiVolume, facetedBoundaries = None, printAllTimers = False): + self.restartBaseName = restartBaseName self.restart = RestartableObject(self) self.integrator = integrator self.restartObjects = restartObjects @@ -81,6 +83,7 @@ def __init__(self, integrator, # Determine the dimensionality of this run, based on the integrator. self.dim = "%id" % self.integrator.dataBase.nDim + adiak_value("dim", self.dim) # Determine the visualization method. if self.dim == "1d": @@ -101,8 +104,11 @@ def __init__(self, integrator, self.insertDistributedBoundary(integrator.physicsPackages()) # Should we look for the last restart set? - if restoreCycle == -1: - restoreCycle = findLastRestart(restartBaseName) + if restartBaseName: + if restoreCycle == -1: + restoreCycle = findLastRestart(restartBaseName) + else: + restoreCycle = None # Generic initialization work. self.reinitializeProblem(restartBaseName, @@ -182,7 +188,8 @@ def reinitializeProblem(self, restartBaseName, vizBaseName, self._periodicTimeWork = [] # Set the restart file base name. - self.setRestartBaseName(restartBaseName) + if restartBaseName: + self.setRestartBaseName(restartBaseName) # Set the simulation time. 
self.integrator.currentTime = initialTime @@ -387,9 +394,12 @@ def advance(self, goalTime, maxSteps=None): numActualGhostNodes = 0 for bc in bcs: numActualGhostNodes += bc.numGhostNodes - print("Total number of (internal, ghost, active ghost) nodes : (%i, %i, %i)" % (mpi.allreduce(db.numInternalNodes, mpi.SUM), - mpi.allreduce(db.numGhostNodes, mpi.SUM), - mpi.allreduce(numActualGhostNodes, mpi.SUM))) + numInternal = db.globalNumInternalNodes + numGhost = db.globalNumGhostNodes + numActGhost = mpi.allreduce(numActualGhostNodes, mpi.SUM) + print(f"Total number of (internal, ghost, active ghost) nodes : ({numInternal}, {numGhost}, {numActGhost})") + adiak_value("total_internal_nodes", numInternal) + adiak_value("total_ghost_nodes", numGhost) # Print how much time was spent per integration cycle. self.stepTimer.printStatus() @@ -560,6 +570,8 @@ def findname(thing): #-------------------------------------------------------------------------- def dropRestartFile(self): + if not self.restartBaseName: + return # First find out if the requested directory exists. import os dire = os.path.dirname(os.path.abspath(self.restartBaseName)) @@ -588,6 +600,8 @@ def dropRestartFile(self): def loadRestartFile(self, restoreCycle, frameDict=None): + if not self.restartBaseName: + return # Find out if the requested file exists. 
import os fileName = self.restartBaseName + "_cycle%i" % restoreCycle diff --git a/src/SimulationControl/SpheralOptionParser.py b/src/SimulationControl/SpheralOptionParser.py index b3efebc2c..e85893e13 100644 --- a/src/SimulationControl/SpheralOptionParser.py +++ b/src/SimulationControl/SpheralOptionParser.py @@ -46,7 +46,10 @@ def commandLine(**options): for key, val in arg_dict.items(): if key in options: if (type(val) != type(options[key])): - val = eval(val, gd) + if (type(val) == str and val == "None"): + val = None + else: + val = eval(val, gd) gd[key] = val # Initialize timers and add inputs as Adiak metadata SpheralTimingParser.init_timer(args) diff --git a/src/SimulationControl/SpheralTimingParser.py b/src/SimulationControl/SpheralTimingParser.py index 3279da857..af1935fdb 100644 --- a/src/SimulationControl/SpheralTimingParser.py +++ b/src/SimulationControl/SpheralTimingParser.py @@ -8,6 +8,8 @@ from SpheralUtilities import adiak_value import SpheralOpenMP +cali_args = ["caliperConfig", "caliperFilename", "caliperConfigJSON"] + def parse_dict(string): """ Function to parse a dictionary provided through the command line @@ -42,10 +44,9 @@ def add_timing_args(parser): # argument and default value and prevents adding the argument # if it already exists arg_list = [action.dest for action in parser._actions] - cali_args = ["Config", "Filename", "ConfigJSON"] for ca in cali_args: if (ca not in arg_list): - parser.add_argument(f"--caliper{ca}", default="", type=str) + parser.add_argument(f"--{ca}", default="", type=str) def init_timer(args): """ @@ -96,14 +97,4 @@ def init_timer(args): if (args.adiakData): for key, val in args.adiakData.items(): adiak_value(key, val) - - # Add all commandLine() inputs as Adiak metadata - args_dict = vars(args) - args_dict.pop("adiakData") # Remove --adiakData inputs - for key, val in args_dict.items(): - if (type(val) is not type(None)): - try: - adiak_value(key, val) - except: - adiak_value(key, val.__name__) return diff --git 
a/tests/CRKSPH.ats b/tests/CRKSPH.ats index d6a66db00..b0e5c7f78 100644 --- a/tests/CRKSPH.ats +++ b/tests/CRKSPH.ats @@ -7,6 +7,7 @@ filter = 0.0 KernelConstructor = "NBSplineKernel" order = 7 linearInExpansion = False +glue(independent=True) #------------------------------------------------------------------------------- # Function to add the tests. diff --git a/tests/PSPH.ats b/tests/PSPH.ats index f18294446..89a1e1194 100644 --- a/tests/PSPH.ats +++ b/tests/PSPH.ats @@ -11,6 +11,7 @@ evolveTotalEnergy = True boolCullenViscosity = True HopkinsConductivity = True resMultiplier = 1 +glue(independent=True) #------------------------------------------------------------------------------- # Function to add the tests. diff --git a/tests/compSPH.ats b/tests/compSPH.ats index af0a555e8..580b5e4c0 100644 --- a/tests/compSPH.ats +++ b/tests/compSPH.ats @@ -7,6 +7,7 @@ filter = 0.0 KernelConstructor = "NBSplineKernel" order = 5 linearInExpansion = False +glue(independent=True) #------------------------------------------------------------------------------- # Function to add the tests. 
diff --git a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py index 60f5ce444..444c7715c 100644 --- a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py +++ b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py @@ -44,7 +44,6 @@ from math import * from SolidSpheral2d import * -from SpheralUtilities import adiak_value from SpheralTestUtilities import * from GenerateNodeDistribution2d import * from CubicNodeGenerator import GenerateSquareNodeDistribution @@ -220,18 +219,21 @@ if solid: hydroname = "Solid" + hydroname -dataDir = os.path.join(dataDir, - hydroname, - "nPerh=%f" % nPerh, - "compatibleEnergy=%s" % compatibleEnergy, - "Cullen=%s" % boolCullenViscosity, - "filter=%f" % filter, - "%s" % nodeMotion, - "nrad=%i_ntheta=%i" % (nRadial, nTheta)) -restartDir = os.path.join(dataDir, "restarts") -restartBaseName = os.path.join(restartDir, "Noh-cylindrical-2d-%ix%i" % (nRadial, nTheta)) - -vizDir = os.path.join(dataDir, "visit") +if dataDir: + dataDir = os.path.join(dataDir, + hydroname, + "nPerh=%f" % nPerh, + "compatibleEnergy=%s" % compatibleEnergy, + "Cullen=%s" % boolCullenViscosity, + "filter=%f" % filter, + "%s" % nodeMotion, + "nrad=%i_ntheta=%i" % (nRadial, nTheta)) + restartDir = os.path.join(dataDir, "restarts") + restartBaseName = os.path.join(restartDir, "Noh-cylindrical-2d-%ix%i" % (nRadial, nTheta)) + vizDir = os.path.join(dataDir, "visit") +else: + restartBaseName = None + vizDir = None if vizTime is None and vizCycle is None: vizBaseName = None else: @@ -240,7 +242,7 @@ #------------------------------------------------------------------------------- # Check if the necessary output directories exist. If not, create them. 
#------------------------------------------------------------------------------- -if mpi.rank == 0: +if mpi.rank == 0 and dataDir: if clearDirectories and os.path.exists(dataDir): shutil.rmtree(dataDir) if not os.path.exists(restartDir): @@ -331,7 +333,6 @@ output("db.appendNodeList(nodes1)") output("db.numNodeLists") output("db.numFluidNodeLists") -adiak_value("total_points", db.globalNumInternalNodes) #------------------------------------------------------------------------------- # Construct the hydro physics object. diff --git a/tests/functional/Hydro/Noh/Noh-spherical-3d.py b/tests/functional/Hydro/Noh/Noh-spherical-3d.py index 465827cdb..4e12c3d75 100644 --- a/tests/functional/Hydro/Noh/Noh-spherical-3d.py +++ b/tests/functional/Hydro/Noh/Noh-spherical-3d.py @@ -7,7 +7,6 @@ import os, shutil, sys from math import * from SolidSpheral3d import * -from SpheralUtilities import adiak_value from SpheralTestUtilities import * from GenerateNodeDistribution3d import * @@ -159,17 +158,20 @@ if solid: hydroname = "Solid" + hydroname -dataDir = os.path.join(dataDir, - hydroname, - "nPerh=%f" % nPerh, - "compatibleEnergy=%s" % compatibleEnergy, - "Cullen=%s" % boolCullenViscosity, - "filter=%f" % filter, - "nx=%i_ny=%i_nz=%i" % (nx, ny, nz)) -restartDir = os.path.join(dataDir, "restarts") -restartBaseName = os.path.join(restartDir, "Noh-spherical-3d-%ix%ix%i" % (nx, ny, nz)) - -vizDir = os.path.join(dataDir, "visit") +if dataDir: + dataDir = os.path.join(dataDir, + hydroname, + "nPerh=%f" % nPerh, + "compatibleEnergy=%s" % compatibleEnergy, + "Cullen=%s" % boolCullenViscosity, + "filter=%f" % filter, + "nx=%i_ny=%i_nz=%i" % (nx, ny, nz)) + restartDir = os.path.join(dataDir, "restarts") + restartBaseName = os.path.join(restartDir, "Noh-spherical-3d-%ix%ix%i" % (nx, ny, nz)) + vizDir = os.path.join(dataDir, "visit") +else: + restartBaseName = None + vizDir = None if vizTime is None and vizCycle is None: vizBaseName = None else: @@ -178,7 +180,7 @@ 
#------------------------------------------------------------------------------- # Check if the necessary output directories exist. If not, create them. #------------------------------------------------------------------------------- -if mpi.rank == 0: +if mpi.rank == 0 and dataDir: if clearDirectories and os.path.exists(dataDir): shutil.rmtree(dataDir) if not os.path.exists(restartDir): @@ -255,7 +257,6 @@ output("db.appendNodeList(nodes1)") output("db.numNodeLists") output("db.numFluidNodeLists") -adiak_value("total_points", db.globalNumInternalNodes) #------------------------------------------------------------------------------- # Construct the hydro physics object. diff --git a/tests/integration.ats b/tests/integration.ats index 21573b92a..585c19704 100644 --- a/tests/integration.ats +++ b/tests/integration.ats @@ -7,6 +7,7 @@ glue(fsisph = False) glue(gsph = False) glue(svph = False) +glue(independent = True) # Geometry unit tests source("unit/Geometry/testVector.py") diff --git a/tests/performance.py.in b/tests/performance.py.in new file mode 100644 index 000000000..104d80246 --- /dev/null +++ b/tests/performance.py.in @@ -0,0 +1,145 @@ +#!/user/bin/env python3 + +# This file runs and compares performance tests through the ats system. 
+# Run using: ./spheral-ats tests/performance.py + +import sys, shutil, os, time +import numpy as np + +# Current system architecture from Spack +spheral_sys_arch = "@SPHERAL_SYS_ARCH@" +# Current install configuration from Spack +spheral_install_config = "@SPHERAL_CONFIGURATION@" + +# Function called on exit to do timing comparisons +def compare_times(manager): + for test in manager.testlist: + run_dir = test.directory + cali_file = test.options["caliper_filename"] + cfile = os.path.join(run_dir, test.options["caliper_filename"]) + ref_regions = test.options["regions"] + ref_timers = test.options["timers"] + r = cr.CaliperReader() + r.read(cfile) + # Get the Caliper timing records + records = r.records + # Get the Caliper metadata, including Adiak data + gls = r.globals + # Filter out the commandLine inputs + metadata = {} + for key, val in gls.items(): + if (r.attribute(key).get('adiak.subcategory') != "spheral_input"): + metadata.update({key: val}) + # WIP: Compare timers against historical timers + if (ci_run): + ref_caliper_dir = test.options["ref_cali_dir"] + if (not os.path.exists(ref_caliper_dir)): + os.makedirs(ref_caliper_dir) + new_cali_data = os.path.join(ref_caliper_dir, cfile) + shutil.copyfile(cfile, new_cali_data) + + # Get the number of nodes (ie hardware nodes) + num_comp_nodes = eval(gls["numhosts"]) + # Get the number of MPI ranks + num_ranks = eval(gls["jobsize"]) + # Get the number of SPH nodes + total_internal_nodes = eval(gls["total_internal_nodes"]) + nodes_per_rank = int(total_internal_nodes / num_ranks) + print(f"SPH nodes per rank {nodes_per_rank}") + walltime = eval(metadata["walltime"]) + print(f"Walltime {walltime}") + # Extract current times + times = {} + # Iterate over list of records + for rec in records: + if ("region" in rec): + fname = rec["region"] + if (type(fname) is list): + fname = fname[-1] + if (fname in ref_regions): + if (fname in times): + for t in ref_timers: + times[fname][t] += float(rec[t]) + else: + new_dict = {} 
+ for t in ref_timers: + new_dict.update({t: float(rec[t])}) + times.update({fname: new_dict}) + +onExit(compare_times) +glue(keep=True) + +if ("power" in spheral_sys_arch): + num_nodes = 1 + num_cores = 40 +elif ("broadwell" in spheral_sys_arch): + num_nodes = 2 + num_cores = 36 + +# NOH tests +test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/tests/functional/Hydro/Noh" + +# Select which timing regions to post-process +regions = ["CheapRK2", + "CheapRK2PreInit", + "ConnectivityMap_computeConnectivity", + "ConnectivityMap_patch", + "CheapRK2EvalDerivs", + "CheapRK2EndStep"] +# Select which timers to use to post-process the regions above +timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks + +# General input for all Noh tests +gen_noh_inps = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 "+\ + "--nPerh 2.01 --graphics False --clearDirectories False --doCompare False "+\ + "--dataDir None --vizTime None --vizCycle None" + +group(name="NOH 2D tests") +test_file = "Noh-cylindrical-2d.py" +nRadial = 100 +test_path = os.path.join(test_dir, test_file) +test_name_base = "NC2D" + +# Test with varying number of ranks +ranks = [1, 2, 4] +for i, n in enumerate(ranks): + test_name = f"{test_name_base}_{i}" + caliper_filename = f"{test_name}_{int(time.time())}.cali" + inps = f"{gen_noh_inps} --caliperFilename {caliper_filename} --nRadial {nRadial} --steps 10 --adiakData 'test_name: {test_name}'" + ncores = int(num_nodes*num_cores/n) + t = test(script=test_path, clas=inps, label=f"{test_name}", + np=ncores, + nn=num_nodes, + caliper_filename=caliper_filename, + regions=regions, + timers=timers, + install_config=spheral_install_config) + +endgroup() + +group(name="NOH 3D tests") +test_file = "Noh-spherical-3d.py" +test_path = os.path.join(test_dir, test_file) +test_name_base = "NS3D" + +# Test with varying number of SPH nodes per rank +npcore = [100, 200, 300] +for i, n in enumerate(npcore): + test_name = f"{test_name_base}_{i}" + 
caliper_filename = f"{test_name}_{int(time.time())}.cali" + total_sph_nodes = n*num_cores + npd = int(np.cbrt(total_sph_nodes)) + node_inps = f"--nx {npd} --ny {npd} --nz {npd}" + inps = f"{gen_noh_inps} {node_inps} --caliperFilename {caliper_filename} --steps 3 --adiakData 'test_name: {test_name}'" + # WIP: Path to benchmark timing data + ncores = int(num_cores) + t = test(script=test_path, clas=inps, label=f"{test_name}", + np=ncores, + nn=num_nodes, + independent=False, + caliper_filename=caliper_filename, + regions=regions, + timers=timers, + install_config=spheral_install_config) +# Add a wait to ensure all timer files are done +wait() diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index 9182c51f9..d0e8fce37 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -2,5 +2,5 @@ add_subdirectory(CXXTests) configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/Utilities/testTimers.py.in" - "${SPHERAL_TEST_INSTALL_PREFIX}/tests/unit/Utilities/testTimers.py" + "${SPHERAL_TEST_INSTALL_PREFIX}/unit/Utilities/testTimers.py" ) diff --git a/tests/unit/CXXTests/CMakeLists.txt b/tests/unit/CXXTests/CMakeLists.txt index 264480b59..df1182b3c 100644 --- a/tests/unit/CXXTests/CMakeLists.txt +++ b/tests/unit/CXXTests/CMakeLists.txt @@ -33,7 +33,7 @@ foreach(test ${gtest_spheral_tests}) if (NOT ENABLE_CXXONLY) configure_file("${CMAKE_CURRENT_SOURCE_DIR}/pyRunCXXTest.in" - "${SPHERAL_TEST_INSTALL_PREFIX}/tests/unit/CXXTests/${test_name}.py" + "${SPHERAL_TEST_INSTALL_PREFIX}/unit/CXXTests/${test_name}.py" ) endif() @@ -50,6 +50,6 @@ string(REPLACE ";" ", " TEST_LIST "${TESTS}") if (NOT ENABLE_CXXONLY) configure_file("${CMAKE_CURRENT_SOURCE_DIR}/runCXXTests.in" - "${SPHERAL_TEST_INSTALL_PREFIX}/tests/unit/CXXTests/runCXXTests.ats" + "${SPHERAL_TEST_INSTALL_PREFIX}/unit/CXXTests/runCXXTests.ats" ) endif() From 09aeaa15feddfbf36e66b7de1aa410a5a72105aa Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 23 Oct 2024 13:20:55 -0700 Subject: [PATCH 13/44] 
Removed vector, tensor etc types as possible adiak inputs --- src/PYB11/Utilities/Utilities_PYB11.py | 15 --------------- 1 file changed, 15 deletions(-) diff --git a/src/PYB11/Utilities/Utilities_PYB11.py b/src/PYB11/Utilities/Utilities_PYB11.py index 7ed49a7d3..705374a1e 100644 --- a/src/PYB11/Utilities/Utilities_PYB11.py +++ b/src/PYB11/Utilities/Utilities_PYB11.py @@ -807,18 +807,3 @@ def clippedVolume(poly = "const Dim<3>::FacetedVolume&", adiak_value{label} = PYB11TemplateFunction(adiak_value, "{value}", pyname="adiak_value") adiak_value2{label} = PYB11TemplateFunction(adiak_value2, "{value}", pyname="adiak_value") """) -array_types = ["Scalar", - "Vector", - "Tensor", - "SymTensor", - "ThirdRankTensor", - "FourthRankTensor", - "FifthRankTensor"] -for ndim in dims: - for ctype in array_types: - value = f"Dim::<{ndim}>::{ctype}" - label = f"{ctype}{ndim}" - exec(f""" -adiak_value{label} = PYB11TemplateFunction(adiak_value, "{value}", pyname="adiak_value") -adiak_value2{label} = PYB11TemplateFunction(adiak_value2, "{value}", pyname="adiak_value") -""") From c63f8f009125e24189691117a5e8e2126b1a6e3a Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 23 Oct 2024 17:07:01 -0700 Subject: [PATCH 14/44] More minor changes to tests and builds --- .gitlab/scripts.yml | 8 +++++- Dockerfile | 2 +- cmake/SetupSpheral.cmake | 2 ++ scripts/spheral_ats.py.in | 39 ++++++++++++++++++--------- tests/performance.py.in | 5 +--- tests/unit/Utilities/testTimers.py.in | 19 +++---------- 6 files changed, 40 insertions(+), 35 deletions(-) diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 69fdc435b..c5ad9f9ab 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -55,7 +55,7 @@ - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - ./build_gitlab/install/spheral ./build_gitlab/install/spheral-ats --ciRun ./build_gitlab/install/$ATS_FILE || exit_code=$? + - ./build_gitlab/install/spheral-ats ./build_gitlab/install/$ATS_FILE || exit_code=$? 
- cp -r test-logs $CI_PROJECT_DIR - exit $exit_code artifacts: @@ -84,6 +84,8 @@ .update_tpls: stage: update_tpls + variables: + GIT_STRATEGY: none script: - $BUILD_ALLOC ./$SCRIPT_DIR/devtools/tpl-manager.py --spec-list="$SCRIPT_DIR/devtools/spec-list.json" --spheral-spack-dir=$UPSTREAM_DIR @@ -112,6 +114,8 @@ .build_dev_pkg: stage: generate_buildcache + variables: + GIT_STRATEGY: none extends: [.spheral_rev_str] script: - INSTALL_DIR=/usr/gapps/Spheral/$SYS_TYPE/spheral-$SPHERAL_REV_STR @@ -131,6 +135,8 @@ .install_dev_pkg: stage: install_production + variables: + GIT_STRATEGY: none script: - INSTALL_DIR=$(cat install-dir.txt) - DEV_PKG_NAME=$(cat dev-pkg-name.txt) diff --git a/Dockerfile b/Dockerfile index ba0889335..e4d69f13f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -75,5 +75,5 @@ RUN make install # Run ATS testing suite. WORKDIR ../install ENV MPLBACKEND=agg -RUN ./spheral-ats --filter="level<100" tests/integration.ats +RUN ./spheral-ats --level 99 tests/integration.ats # ----------------------------------------------------------------------------- diff --git a/cmake/SetupSpheral.cmake b/cmake/SetupSpheral.cmake index 5a9f99ee2..30c88c387 100644 --- a/cmake/SetupSpheral.cmake +++ b/cmake/SetupSpheral.cmake @@ -165,7 +165,9 @@ if (ENABLE_TESTS) USE_SOURCE_PERMISSIONS DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" PATTERN "*CMakeLists.txt*" EXCLUDE + PATTERN "*.cmake" EXCLUDE PATTERN "*.in" EXCLUDE + PATTERN "*.pyc" EXCLUDE PATTERN "*~" EXCLUDE) add_subdirectory(${SPHERAL_ROOT_DIR}/tests/unit) endif() diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index b87fe5f68..892620381 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!@CMAKE_INSTALL_PREFIX@/spheral import os, time, sys import argparse @@ -52,8 +52,13 @@ def report_results(output_dir): def run_and_report(run_command, ci_output, num_runs): if (num_runs > max_reruns): raise Exception ("Exceeded number of ATS reruns") + 
ats_cont_file = os.path.join(ci_output, "continue.ats") + new_run_command = run_command + if (os.path.exists(ats_cont_file) and num_runs == 0): + new_run_command = f"{run_command} {ats_cont_file}" + print("Restarting from previous job") try: - sexe(run_command) + sexe(new_run_command) except Exception as e: print(e) tests_passed = report_results(ci_output) @@ -97,8 +102,8 @@ def main(): parser.add_argument("--timeLimit", type=int, default=None, help="Time limit for allocation.") - parser.add_argument("--ciRun", action="store_true", - help="Pass if running the CI.") + parser.add_argument("--perfTest", action="store_true", + help="Turn on if doing a performance test.") parser.add_argument("--atsHelp", action="store_true", help="Print the help output for ATS. Useful for seeing ATS options.") options, unknown_options = parser.parse_known_args() @@ -113,21 +118,23 @@ def main(): ats_args = [install_filters, "--allInteractive"] numNodes = options.numNodes timeLimit = options.timeLimit + ciRun = False if options.perfTest else True if ("broadwell" in spheral_sys_arch): os.environ["MACHINE_TYPE"] = "slurm36" numNodes = numNodes if numNodes else 2 timeLimit = timeLimit if timeLimit else 120 time_limit = 120 - mac_args = ["--npMax=36"] + mac_args = [] launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " + if (ciRun): + mac_args.append("--npMax=36") elif ("power9" in spheral_sys_arch): numNodes = numNodes if numNodes else 1 timeLimit = timeLimit if timeLimit else 60 - mac_args = ["--smpi_off", "--npMax=40"] + mac_args = ["--smpi_off"] launch_cmd = f"lalloc {numNodes} -W {timeLimit} " - if (options.ciRun): + if (ciRun): launch_cmd += "-q pdebug -G guests " - mac_args.append("--lrun_pack") else: mac_args.append("--lrun") ats_args.extend(mac_args) @@ -140,19 +147,25 @@ def main(): if (timeLimit): ats_args.append(f"--timelimit {timeLimit}") # If doing a CI run, set some more options - if (options.ciRun): - ats_args.append(f"--logs {test_log_name}") + if (ciRun): + if 
("--logs" not in unknown_options): + ats_args.append(f"--logs {test_log_name}") + log_name = test_log_name + else: + log_name_indx = unknown_options.index("--logs") + 1 + log_name = unknown_options[log_name_indx] + ats_args.append("--glue='independent=True'") ats_args = " ".join(str(x) for x in ats_args) other_args = " ".join(str(x) for x in unknown_options) run_command = f"{launch_cmd}{ats_exe} -e {spheral_exe} {ats_args} {other_args}" print(f"\nRunning: {run_command}\n") - if (not options.ciRun): + if (ciRun): + run_and_report(run_command, log_name, 0) + else: try: sexe(run_command) except Exception as e: print(e) - else: - run_and_report(run_command, test_log_name, 0) if __name__ == "__main__": main() diff --git a/tests/performance.py.in b/tests/performance.py.in index 104d80246..b34251333 100644 --- a/tests/performance.py.in +++ b/tests/performance.py.in @@ -66,7 +66,6 @@ def compare_times(manager): new_dict.update({t: float(rec[t])}) times.update({fname: new_dict}) -onExit(compare_times) glue(keep=True) if ("power" in spheral_sys_arch): @@ -77,7 +76,7 @@ elif ("broadwell" in spheral_sys_arch): num_cores = 36 # NOH tests -test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/tests/functional/Hydro/Noh" +test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/functional/Hydro/Noh" # Select which timing regions to post-process regions = ["CheapRK2", @@ -109,7 +108,6 @@ for i, n in enumerate(ranks): ncores = int(num_nodes*num_cores/n) t = test(script=test_path, clas=inps, label=f"{test_name}", np=ncores, - nn=num_nodes, caliper_filename=caliper_filename, regions=regions, timers=timers, @@ -135,7 +133,6 @@ for i, n in enumerate(npcore): ncores = int(num_cores) t = test(script=test_path, clas=inps, label=f"{test_name}", np=ncores, - nn=num_nodes, independent=False, caliper_filename=caliper_filename, regions=regions, diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py.in index 8d3ef1002..1ab566620 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ 
b/tests/unit/Utilities/testTimers.py.in @@ -20,20 +20,11 @@ test_dict = {"perf_test": "weak_scaling", for key, val in test_dict.items(): adiak_value(key, val) -# Test the --adiakData input. This must match what is hard-coded in the ATS lines +# Test the --adiakData input. This must match what is +# hard-coded in the ATS magic lines adiak_data_dict = {"adiak_test": 1, "test_adiak": "two"} -# Test that commmandLine inputs are being passed to Adiak -test_int = 4 -test_str = "hello" -test_float = 4.224 -inp_test_dict = {"test_int": test_int, - "test_str": test_str, - "test_float": test_float} - -commandLine(test_int=test_int, - test_str=test_str, - test_float=test_float) +commandLine() # Remove cali files from previous test runs caliper_file = TimerMgr.get_filename() @@ -89,10 +80,6 @@ if (do_timers and TimerMgr.get_filename()): assert test_dict.items() <= adiak_inp.items(),\ "incorrect Adiak values found in Caliper file" - # Test Adiak outputs for commandLine() inputs - assert inp_test_dict.items() <= adiak_inp.items(),\ - "incorrect commandLine() inputs found in Caliper file Adiak values" - # Test --adiakData command line input if ("adiakData" in adiak_inp): assert adiak_data_dict.items() <= adiak_inp.items(),\ From 78af0bab6fd6275346280a2ed500e82fe1286433 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 24 Oct 2024 10:51:47 -0700 Subject: [PATCH 15/44] Updated py-ats version and added proper patch --- scripts/spack/packages/py-ats/exit_code.patch | 14 +++++++++ scripts/spack/packages/py-ats/package.py | 30 +++++++++++-------- scripts/spack/packages/spheral/package.py | 2 +- scripts/spheral_ats.py.in | 2 ++ 4 files changed, 34 insertions(+), 14 deletions(-) create mode 100644 scripts/spack/packages/py-ats/exit_code.patch diff --git a/scripts/spack/packages/py-ats/exit_code.patch b/scripts/spack/packages/py-ats/exit_code.patch new file mode 100644 index 000000000..f8f6845b6 --- /dev/null +++ b/scripts/spack/packages/py-ats/exit_code.patch @@ -0,0 +1,14 @@ +--- 
a/ats/management.py ++++ b/ats/management.py +@@ -703,9 +703,9 @@ + self.finalBanner() + + if (core_result and postprocess_result and report_result): +- return True ++ return 0 + else: +- return False ++ return 1 + + def preprocess(self): + "Call beforeRunRoutines." diff --git a/scripts/spack/packages/py-ats/package.py b/scripts/spack/packages/py-ats/package.py index 3d69395a1..3fa3aeccb 100644 --- a/scripts/spack/packages/py-ats/package.py +++ b/scripts/spack/packages/py-ats/package.py @@ -1,9 +1,9 @@ -# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other +# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) -from spack import * +from spack.package import * class PyAts(PythonPackage): @@ -12,19 +12,23 @@ class PyAts(PythonPackage): of high performance computers.""" homepage = "https://github.com/LLNL/ATS" - git = "https://github.com/LLNL/ATS.git" + git = "https://github.com/LLNL/ATS.git" - maintainers = ['white238'] + maintainers("white238") - version('main', branch='main') - version('exit', branch='bugfix/exit-code') - version('7.0.100', tag='7.0.100') - version('7.0.9', tag='7.0.9') - version('7.0.5', tag='7.0.5') + license("MIT") + + version("main", branch="main") + version('7.0.117', commit='1aa5c381d201306d16397cc0e76a81b4450438b2') + version("7.0.105", tag="7.0.105", commit="3a3461061d4493a002018f5bb3715db702212f72") + version("7.0.100", tag="7.0.100", commit="202c18d11b8f1c14f1a3361a6e45c9e4f83a3fa1") + version("7.0.5", tag="7.0.5", commit="86b0b18b96b179f97008393170f5e5bc95118867") # TODO: Add flux variant when Flux functionality works in ATS - depends_on("python@3.8:", type=('build', 'run')) - depends_on("py-numpy", type=('build', 'run')) - depends_on('py-setuptools', type='build') - depends_on('py-poetry-core', type='build') + depends_on("python@3.8:", type=("build", "run")) + 
depends_on("py-numpy", type=("build", "run")) + depends_on("py-setuptools", type="build") + depends_on("py-poetry-core", type="build") + + patch('exit_code.patch') diff --git a/scripts/spack/packages/spheral/package.py b/scripts/spack/packages/spheral/package.py index 000fb8fc4..c0a1dede4 100644 --- a/scripts/spack/packages/spheral/package.py +++ b/scripts/spack/packages/spheral/package.py @@ -83,7 +83,7 @@ class Spheral(CachedCMakePackage, CudaPackage): depends_on('py-h5py@3.9.0', type='build') depends_on('py-docutils@0.18.1', type='build') depends_on('py-scipy@1.12.0', type='build') - depends_on('py-ats@exit', type='build') + depends_on('py-ats@7.0.117', type='build') depends_on('py-mpi4py@3.1.5', type='build', when='+mpi') depends_on('py-sphinx', type='build') diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 892620381..73323b096 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -2,6 +2,7 @@ import os, time, sys import argparse +import ats # This is a wrapper for running Spheral through ATS @@ -119,6 +120,7 @@ def main(): numNodes = options.numNodes timeLimit = options.timeLimit ciRun = False if options.perfTest else True + launch_cmd = "" if ("broadwell" in spheral_sys_arch): os.environ["MACHINE_TYPE"] = "slurm36" numNodes = numNodes if numNodes else 2 From 7e89b2454c6cfd5659beacbb9954404465636199 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Fri, 25 Oct 2024 14:31:37 -0700 Subject: [PATCH 16/44] Added failing test to ensure tests are working, more fixes for option parsing, removed use of None string in all existing tests, removed py-ats patch --- scripts/spack/packages/py-ats/exit_code.patch | 14 ------- scripts/spack/packages/py-ats/package.py | 1 - scripts/spheral_ats.py.in | 41 +++++++++---------- src/SimulationControl/SpheralOptionParser.py | 26 +++++++----- .../Damage/TensileDisk/TensileDisk-2d.py | 2 +- .../Damage/TensileRod/TensileRod-1d.py | 8 ++-- .../Damage/TensileRod/TensileRod-2d.py | 2 +- 
.../Gravity/CollisionlessSphereCollapse.py | 2 +- .../Hydro/AcousticWave/AcousticWave-1d.py | 8 ++-- .../AcousticWave/AcousticWave-1d_gamma2.py | 10 ++--- .../Hydro/AcousticWave/StandingWave-1d.py | 10 ++--- .../Hydro/ConvectionTest/ConvectionTest.py | 3 -- .../Hydro/Discontinuity/Discontinuity.py | 6 +-- .../Hydro/Discontinuity/Discontinuity2d.py | 8 ++-- .../Hydro/FreeExpansion/FreeExpansion-1d.py | 2 +- .../Hydro/FreeExpansion/FreeExpansion-2d.py | 2 +- .../Hydro/GreshoVortex/GreshoVortex.py | 4 +- .../Hydro/GreshoVortex/GreshoVortexOverlay.py | 1 - .../KelvinHelmholtz/KelvinHelmholtz-2d.py | 3 +- .../KelvinHelmholtz-2d_McNally.py | 2 - .../KelvinHelmholtz/KelvinHelmholtz-3d.py | 3 +- .../functional/Hydro/KeplerDisk/TwoMatDisk.py | 8 ++-- tests/functional/Hydro/Noh/Noh-RZ.py | 14 +++---- .../Hydro/Noh/Noh-cylindrical-2d.py | 8 ++-- tests/functional/Hydro/Noh/Noh-planar-1d.py | 41 +++++++++++-------- tests/functional/Hydro/Noh/Noh-shear-2d.py | 41 +++++++++++-------- .../functional/Hydro/Noh/Noh-spherical-1d.py | 18 ++++---- .../functional/Hydro/Noh/Noh-spherical-3d.py | 16 ++++---- .../Hydro/RayleighTaylor/Hydrostatic-1d.py | 4 +- .../functional/Hydro/RayleighTaylor/RT-2d.py | 4 +- .../Hydro/RayleighTaylor/RT-2d_Hopkins.py | 2 +- .../Hydro/RayleighTaylor/RT-const-rho.py | 4 +- tests/functional/Hydro/Riemann/Riemann.py | 4 +- .../functional/Hydro/Sedov/Sedov-2d-ratio.py | 4 +- tests/functional/Hydro/Sedov/Sedov-RZ.py | 10 ++--- .../Hydro/Sedov/Sedov-cylindrical-2d.py | 4 +- .../functional/Hydro/Sedov/Sedov-planar-1d.py | 4 +- .../Hydro/Sedov/Sedov-spherical-1d.py | 4 +- .../Hydro/Sedov/Sedov-spherical-3d.py | 4 +- .../ShockBubble/ShockBubble-Variant-2d.py | 2 +- tests/functional/Hydro/Sod/Sod-RZ.py | 6 +-- .../Hydro/Sod/Sod-planar-1d-WaterGas.py | 4 +- tests/functional/Hydro/Sod/Sod-planar-1d.py | 4 +- .../Hydro/Sod/Sod-planar-2d-WaterGas.py | 4 +- tests/functional/Hydro/Sod/Sod-planar-2d.py | 4 +- tests/functional/Hydro/Sod/Sod-planar-3d.py | 4 +- 
.../functional/Hydro/Sod/Sod-spherical-1d.py | 4 +- .../functional/Hydro/Sod/convSod-planar-1d.py | 4 +- .../SphericalCollapse/SphericalCollapse.py | 2 +- tests/functional/Hydro/Turbulence/Stir-3d.py | 4 +- tests/functional/Hydro/YeeVortex/YeeVortex.py | 4 +- tests/functional/Interpolation/one-mass.py | 8 ++-- tests/functional/Interpolation/one-node.py | 8 ++-- tests/functional/Interpolation/two-nodes.py | 4 +- .../PlanarCompaction/PlanarCompaction-1d.py | 4 +- .../CollidingPlates/CollidingPlates-1d.py | 10 ++--- tests/functional/Strength/Piston/Piston.py | 2 +- .../Strength/PlateImpact/PlateImpact-1d.py | 8 ++-- .../Strength/PlateImpact/TP106-1d.py | 2 +- tests/functional/Strength/Verney/Verney-2d.py | 2 +- tests/functional/Strength/Verney/Verney-3d.py | 2 +- tests/functional/Strength/Verney/Verney-RZ.py | 2 +- .../Strength/Verney/Verney-spherical.py | 6 +-- tests/functional/Surfaces/1d.py | 2 +- tests/integration.ats | 3 ++ tests/performance.py.in | 2 +- tests/unit/CRKSPH/testConsistency.py | 9 ++-- tests/unit/CRKSPH/testInterpolation.py | 6 +-- tests/unit/SPH/testLinearVelocityGradient.py | 1 - tests/unit/Utilities/testFails.py | 5 +++ 70 files changed, 238 insertions(+), 246 deletions(-) delete mode 100644 scripts/spack/packages/py-ats/exit_code.patch create mode 100644 tests/unit/Utilities/testFails.py diff --git a/scripts/spack/packages/py-ats/exit_code.patch b/scripts/spack/packages/py-ats/exit_code.patch deleted file mode 100644 index f8f6845b6..000000000 --- a/scripts/spack/packages/py-ats/exit_code.patch +++ /dev/null @@ -1,14 +0,0 @@ ---- a/ats/management.py -+++ b/ats/management.py -@@ -703,9 +703,9 @@ - self.finalBanner() - - if (core_result and postprocess_result and report_result): -- return True -+ return 0 - else: -- return False -+ return 1 - - def preprocess(self): - "Call beforeRunRoutines." 
diff --git a/scripts/spack/packages/py-ats/package.py b/scripts/spack/packages/py-ats/package.py index 3fa3aeccb..e915b197c 100644 --- a/scripts/spack/packages/py-ats/package.py +++ b/scripts/spack/packages/py-ats/package.py @@ -31,4 +31,3 @@ class PyAts(PythonPackage): depends_on("py-setuptools", type="build") depends_on("py-poetry-core", type="build") - patch('exit_code.patch') diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 73323b096..6ea89d438 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -2,14 +2,10 @@ import os, time, sys import argparse -import ats +import ats.util.generic_utils as ats_utils # This is a wrapper for running Spheral through ATS -# These are set by CMake -spheral_sys_arch = "@SPHERAL_SYS_ARCH@" -spheral_install_config = "@SPHERAL_CONFIGURATION@" - # Find spheralutils.py install_prefix = "@CMAKE_INSTALL_PREFIX@" sys.path.append(os.path.join(install_prefix, "scripts")) @@ -29,7 +25,6 @@ ats_exe = os.path.join(install_prefix, ".venv/bin/ats") spheral_exe = os.path.join(install_prefix, "spheral") #------------------------------------------------------------------------------ - # Run ats.py to check results and return the number of failed tests def report_results(output_dir): ats_py = os.path.join(output_dir, "atsr.py") @@ -84,7 +79,16 @@ def run_and_report(run_command, ci_output, num_runs): print("WARNING: Test failure, rerunning ATS") run_and_report(rerun_command, ci_output, num_runs + 1) +#--------------------------------------------------------------------------- +# Main routine +#--------------------------------------------------------------------------- def main(): + # Use ATS to for some machine specific functions + if "MACHINE_TYPE" not in os.environ: + ats_utils.set_machine_type_based_on_sys_type() + hostname = None + if "HOSTNAME" in os.environ: + hostname = os.environ["HOSTNAME"] #--------------------------------------------------------------------------- # Setup argument parser 
#--------------------------------------------------------------------------- @@ -116,38 +120,32 @@ def main(): # Setup machine info classes #--------------------------------------------------------------------------- mac_args = [] - ats_args = [install_filters, "--allInteractive"] + ats_args = [install_filters] numNodes = options.numNodes timeLimit = options.timeLimit ciRun = False if options.perfTest else True launch_cmd = "" - if ("broadwell" in spheral_sys_arch): - os.environ["MACHINE_TYPE"] = "slurm36" + + if "rzgenie" in hostname or "ruby" in hostname: numNodes = numNodes if numNodes else 2 timeLimit = timeLimit if timeLimit else 120 time_limit = 120 - mac_args = [] + mac_args = [f"--numNodes {numNodes}"] launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " if (ciRun): - mac_args.append("--npMax=36") - elif ("power9" in spheral_sys_arch): + launch_cmd += "-p pdebug " + elif "lassen" in hostname or "rzansel" in hostname: numNodes = numNodes if numNodes else 1 timeLimit = timeLimit if timeLimit else 60 - mac_args = ["--smpi_off"] - launch_cmd = f"lalloc {numNodes} -W {timeLimit} " + mac_args = ["--smpi_off", f"--numNodes {numNodes}"] + launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " if (ciRun): - launch_cmd += "-q pdebug -G guests " - else: - mac_args.append("--lrun") + launch_cmd += "-q pdebug " ats_args.extend(mac_args) #--------------------------------------------------------------------------- # Launch ATS #--------------------------------------------------------------------------- - if (numNodes): - ats_args.append(f"--numNodes {numNodes}") - if (timeLimit): - ats_args.append(f"--timelimit {timeLimit}") # If doing a CI run, set some more options if (ciRun): if ("--logs" not in unknown_options): @@ -157,6 +155,7 @@ def main(): log_name_indx = unknown_options.index("--logs") + 1 log_name = unknown_options[log_name_indx] ats_args.append("--glue='independent=True'") + ats_args.append("--continueFreq=15") ats_args = 
" ".join(str(x) for x in ats_args) other_args = " ".join(str(x) for x in unknown_options) run_command = f"{launch_cmd}{ats_exe} -e {spheral_exe} {ats_args} {other_args}" diff --git a/src/SimulationControl/SpheralOptionParser.py b/src/SimulationControl/SpheralOptionParser.py index e85893e13..7903f5273 100644 --- a/src/SimulationControl/SpheralOptionParser.py +++ b/src/SimulationControl/SpheralOptionParser.py @@ -9,14 +9,24 @@ from SpheralTestUtilities import globalFrame import SpheralTimingParser +def parse_value(value): + gd = globalFrame().f_globals + try: + return eval(value, gd) + except: + return value + def commandLine(**options): # Build a command line parser with the keyword arguments passed to us. parser = argparse.ArgumentParser() - for key in options: - parser.add_argument("--" + key, - dest = key, - default = options[key]) + for key, default in options.items(): + if default == "None": + raise SyntaxError(f"ERROR: {key}, None as a default value cannot be a string") + elif type(default) is str: + parser.add_argument(f"--{key}", type = str, default = default) + else: + parser.add_argument(f"--{key}", type = parse_value, default = default) # Add the universal options supported by all Spheral++ scripts. parser.add_argument("-v", "--verbose", @@ -44,12 +54,8 @@ def commandLine(**options): # Set all the variables. 
gd = globalFrame().f_globals for key, val in arg_dict.items(): - if key in options: - if (type(val) != type(options[key])): - if (type(val) == str and val == "None"): - val = None - else: - val = eval(val, gd) + if val == "None": + val = None gd[key] = val # Initialize timers and add inputs as Adiak metadata SpheralTimingParser.init_timer(args) diff --git a/tests/functional/Damage/TensileDisk/TensileDisk-2d.py b/tests/functional/Damage/TensileDisk/TensileDisk-2d.py index 94c35df66..e512ad874 100644 --- a/tests/functional/Damage/TensileDisk/TensileDisk-2d.py +++ b/tests/functional/Damage/TensileDisk/TensileDisk-2d.py @@ -75,7 +75,7 @@ plotFlaws = False, clearDirectories = False, dataDirBase = "dumps-TensileDisk-2d", - outputFile = "None", + outputFile = None, # Should we restart (-1 => find most advanced available restart) restoreCycle = -1, diff --git a/tests/functional/Damage/TensileRod/TensileRod-1d.py b/tests/functional/Damage/TensileRod/TensileRod-1d.py index 4610a2c2c..eb1478c9f 100644 --- a/tests/functional/Damage/TensileRod/TensileRod-1d.py +++ b/tests/functional/Damage/TensileRod/TensileRod-1d.py @@ -171,8 +171,8 @@ def restoreState(self, file, path): clearDirectories = False, referenceFile = "Reference/TensileRod-GradyKippOwen-1d-1proc-reproducing-20240816.gnu", dataDirBase = "dumps-TensileRod-1d", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, ) # On the IBM BlueOS machines we have some tolerance issues... @@ -744,7 +744,7 @@ def restoreState(self, file, path): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: from SpheralTestUtilities import multiSort state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) @@ -786,7 +786,7 @@ def restoreState(self, file, path): # Also we can optionally compare the current results with another file for # bit level consistency. #--------------------------------------------------------------------------- - if comparisonFile != "None" and BuildData.cxx_compiler_id != "IntelLLVM": + if comparisonFile and BuildData.cxx_compiler_id != "IntelLLVM": comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Damage/TensileRod/TensileRod-2d.py b/tests/functional/Damage/TensileRod/TensileRod-2d.py index 65dd5fadd..efc9a3283 100644 --- a/tests/functional/Damage/TensileRod/TensileRod-2d.py +++ b/tests/functional/Damage/TensileRod/TensileRod-2d.py @@ -166,7 +166,7 @@ def restoreState(self, file, path): clearDirectories = False, dataDirBase = "dumps-TensileRod-2d", - outputFile = "None", + outputFile = None, ) dx = xlength/nx diff --git a/tests/functional/Gravity/CollisionlessSphereCollapse.py b/tests/functional/Gravity/CollisionlessSphereCollapse.py index 13545182f..e30313858 100644 --- a/tests/functional/Gravity/CollisionlessSphereCollapse.py +++ b/tests/functional/Gravity/CollisionlessSphereCollapse.py @@ -263,7 +263,7 @@ #------------------------------------------------------------------------------- # If requested, write out the profiles #------------------------------------------------------------------------------- -if outputFile != "None" and mpi.rank == 0: +if outputFile and mpi.rank == 0: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 14*"%15s " + "\n") % ("r", "x", "y", "z", "vx", "vy", "vz", "Hxx", "Hxy", "Hxz", "Hyy", "Hyz", "Hzz", "phi")) diff --git 
a/tests/functional/Hydro/AcousticWave/AcousticWave-1d.py b/tests/functional/Hydro/AcousticWave/AcousticWave-1d.py index e9318205f..761c5f21c 100644 --- a/tests/functional/Hydro/AcousticWave/AcousticWave-1d.py +++ b/tests/functional/Hydro/AcousticWave/AcousticWave-1d.py @@ -469,7 +469,7 @@ def printTotalEnergy(cycle,time,dt): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mprof = mpi.reduce(nodes1.mass().internalValues(), mpi.SUM) @@ -500,7 +500,7 @@ def printTotalEnergy(cycle,time,dt): # While we're at it compute and report the error norms. import Pnorm print("\tQuantity \t\tL1 \t\t\tL2 \t\t\tLinf") - if normOutputFile != "None": + if normOutputFile: f = open(normOutputFile, "a") if writeOutputLabel: f.write(("#" + 13*"%17s " + "\n") % ('"nx"', @@ -522,12 +522,12 @@ def printTotalEnergy(cycle,time,dt): L2 = Pn.gridpnorm(2, xmin, xmax) Linf = Pn.gridpnorm("inf", xmin, xmax) print("\t%s \t\t%g \t\t%g \t\t%g" % (name, L1, L2, Linf)) - if normOutputFile != "None": + if normOutputFile: f.write((3*"%16.12e ") % (L1, L2, Linf)) # if name == "Mass Density": # pickleDumpL1 = L1 - if normOutputFile != "None": + if normOutputFile: f.write("\n") f.close() diff --git a/tests/functional/Hydro/AcousticWave/AcousticWave-1d_gamma2.py b/tests/functional/Hydro/AcousticWave/AcousticWave-1d_gamma2.py index a4ad8b963..fb170d66c 100644 --- a/tests/functional/Hydro/AcousticWave/AcousticWave-1d_gamma2.py +++ b/tests/functional/Hydro/AcousticWave/AcousticWave-1d_gamma2.py @@ -96,7 +96,7 @@ def smooth(x,window_len=11,window='hanning'): clearDirectories = True, dataDirBase = "dumps-planar-AcousticWave-1d", outputFile = "AcousticWave-planar-1d.gnu", - normOutputFile = "None", + 
normOutputFile = None, writeOutputLabel = True, graphics = "gnu", @@ -424,7 +424,7 @@ def __call__(self, x): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mprof = mpi.reduce(nodes1.mass().internalValues(), mpi.SUM) @@ -459,7 +459,7 @@ def __call__(self, x): # While we're at it compute and report the error norms. import Pnorm print("\tQuantity \t\tL1 \t\t\tL2 \t\t\tLinf") - if normOutputFile != "None": + if normOutputFile: f = open(normOutputFile, "a") if writeOutputLabel: f.write(("#" + 13*"%17s " + "\n") % ('"nx"', @@ -480,9 +480,9 @@ def __call__(self, x): L2 = Pn.pnormAverage(2, xmin, xmax) Linf = Pn.pnormAverage("inf", xmin, xmax) print("\t%s \t\t%g \t\t%g \t\t%g" % (name, L1, L2, Linf)) - if normOutputFile != "None": + if normOutputFile: f.write((3*"%16.12e ") % (L1, L2, Linf)) - if normOutputFile != "None": + if normOutputFile: f.write("\n") f.close() diff --git a/tests/functional/Hydro/AcousticWave/StandingWave-1d.py b/tests/functional/Hydro/AcousticWave/StandingWave-1d.py index 4cfa18521..8cf0d6a7f 100644 --- a/tests/functional/Hydro/AcousticWave/StandingWave-1d.py +++ b/tests/functional/Hydro/AcousticWave/StandingWave-1d.py @@ -70,7 +70,7 @@ clearDirectories = True, dataDirBase = "dumps-planar-StandingWave-1d", outputFile = "StandingWave-planar-1d.gnu", - normOutputFile = "None", + normOutputFile = None, writeOutputLabel = True, graphics = "gnu", @@ -369,7 +369,7 @@ def Minterval(xi0, xi1): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mprof = mpi.reduce(nodes1.mass().internalValues(), mpi.SUM) @@ -405,7 +405,7 @@ def Minterval(xi0, xi1): # While we're at it compute and report the error norms. import Pnorm print("\tQuantity \t\tL1 \t\t\tL2 \t\t\tLinf") - if normOutputFile != "None": + if normOutputFile: f = open(normOutputFile, "a") if writeOutputLabel: f.write(("#" + 13*"%17s " + "\n") % ('"nx"', @@ -426,9 +426,9 @@ def Minterval(xi0, xi1): L2 = Pn.gridpnorm(2, xmin, xmax) Linf = Pn.gridpnorm("inf", xmin, xmax) print("\t%s \t\t%g \t\t%g \t\t%g" % (name, L1, L2, Linf)) - if normOutputFile != "None": + if normOutputFile: f.write((3*"%16.12e ") % (L1, L2, Linf)) - if normOutputFile != "None": + if normOutputFile: f.write("\n") f.close() diff --git a/tests/functional/Hydro/ConvectionTest/ConvectionTest.py b/tests/functional/Hydro/ConvectionTest/ConvectionTest.py index 59a15aaab..ef547f40b 100644 --- a/tests/functional/Hydro/ConvectionTest/ConvectionTest.py +++ b/tests/functional/Hydro/ConvectionTest/ConvectionTest.py @@ -151,9 +151,6 @@ def finalize(self, t, dt, db, state, derivs): redistributeStep = 500, checkRestart = False, dataDir = "dumps-Convection-Test-2d", - outputFile = "None", - comparisonFile = "None", - serialDump = False, #whether to dump a serial ascii file at the end for viz bArtificialConduction = False, diff --git a/tests/functional/Hydro/Discontinuity/Discontinuity.py b/tests/functional/Hydro/Discontinuity/Discontinuity.py index eab0bd9b8..1f40ee886 100644 --- a/tests/functional/Hydro/Discontinuity/Discontinuity.py +++ b/tests/functional/Hydro/Discontinuity/Discontinuity.py @@ -2,8 +2,8 @@ #ATS:t1 = testif(t0, SELF, "--graphics None --clearDirectories False --checkError False --restartStep 20 --restoreCycle 20 --steps 20 --checkRestart True", label="Planar Noh problem -- 
1-D (serial) RESTART CHECK") #ATS:t2 = test( SELF, "--graphics None --clearDirectories True --checkError True --dataDir 'dumps-planar-restartcheck' --restartStep 20", np=2, label="Planar Noh problem -- 1-D (parallel)") #ATS:t3 = testif(t2, SELF, "--graphics None --clearDirectories False --checkError False --dataDir 'dumps-planar-restartcheck' --restartStep 20 --restoreCycle 20 --steps 20 --checkRestart True", np=2, label="Planar Noh problem -- 1-D (parallel) RESTART CHECK") -#ATS:t4 = test( SELF, "--graphics None --clearDirectories True --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True --outputFile 'Noh-planar-1proc-reproducing.txt'", label="Planar Noh problem -- 1-D (serial reproducing test setup)") -#ATS:t5 = testif(t4, SELF, "--graphics None --clearDirectories False --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True --outputFile 'Noh-planar-4proc-reproducing.txt' --comparisonFile 'Noh-planar-1proc-reproducing.txt'", np=4, label="Planar Noh problem -- 1-D (4 proc reproducing test)") +#ATS:t4 = test( SELF, "--graphics None --clearDirectories True --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True", label="Planar Noh problem -- 1-D (serial reproducing test setup)") +#ATS:t5 = testif(t4, SELF, "--graphics None --clearDirectories False --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True", np=4, label="Planar Noh problem -- 1-D (4 proc reproducing test)") #------------------------------------------------------------------------------- # The Planar Noh test case run in 1-D. 
# @@ -91,8 +91,6 @@ restartStep = 10000, dataDir = "dumps-planar", restartBaseName = "Noh-planar-1d", - outputFile = "None", - comparisonFile = "None", # Parameters for the test scalePressure = 5.0, diff --git a/tests/functional/Hydro/Discontinuity/Discontinuity2d.py b/tests/functional/Hydro/Discontinuity/Discontinuity2d.py index 42305e3bb..afe76d223 100644 --- a/tests/functional/Hydro/Discontinuity/Discontinuity2d.py +++ b/tests/functional/Hydro/Discontinuity/Discontinuity2d.py @@ -2,8 +2,8 @@ #ATS:t1 = testif(t0, SELF, "--graphics None --clearDirectories False --checkError False --restartStep 20 --restoreCycle 20 --steps 20 --checkRestart True", label="Planar Noh problem -- 1-D (serial) RESTART CHECK") #ATS:t2 = test( SELF, "--graphics None --clearDirectories True --checkError True --dataDir 'dumps-planar-restartcheck' --restartStep 20", np=2, label="Planar Noh problem -- 1-D (parallel)") #ATS:t3 = testif(t2, SELF, "--graphics None --clearDirectories False --checkError False --dataDir 'dumps-planar-restartcheck' --restartStep 20 --restoreCycle 20 --steps 20 --checkRestart True", np=2, label="Planar Noh problem -- 1-D (parallel) RESTART CHECK") -#ATS:t4 = test( SELF, "--graphics None --clearDirectories True --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True --outputFile 'Noh-planar-1proc-reproducing.txt'", label="Planar Noh problem -- 1-D (serial reproducing test setup)") -#ATS:t5 = testif(t4, SELF, "--graphics None --clearDirectories False --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True --outputFile 'Noh-planar-4proc-reproducing.txt' --comparisonFile 'Noh-planar-1proc-reproducing.txt'", np=4, label="Planar Noh problem -- 1-D (4 proc reproducing test)") +#ATS:t4 = test( SELF, "--graphics None --clearDirectories True --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True", label="Planar Noh problem -- 1-D (serial reproducing test setup)") +#ATS:t5 = testif(t4, SELF, "--graphics 
None --clearDirectories False --checkError True --dataDir 'dumps-planar-reproducing' --domainIndependent True", np=4, label="Planar Noh problem -- 1-D (4 proc reproducing test)") #------------------------------------------------------------------------------- # The Planar Noh test case run in 1-D. # @@ -100,9 +100,7 @@ redistributeStep = 200, dataDir = "dumps-planar", restartBaseName = "Noh-planar-1d", - outputFile = "None", - comparisonFile = "None", - + # Parameters for the test scalePressure = 5.0, scaleEnergy = 2.0, diff --git a/tests/functional/Hydro/FreeExpansion/FreeExpansion-1d.py b/tests/functional/Hydro/FreeExpansion/FreeExpansion-1d.py index d0a758fe5..89107c71f 100644 --- a/tests/functional/Hydro/FreeExpansion/FreeExpansion-1d.py +++ b/tests/functional/Hydro/FreeExpansion/FreeExpansion-1d.py @@ -309,7 +309,7 @@ def smooth(x,window_len=11,window='hanning'): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mprof = mpi.reduce(nodes1.mass().internalValues(), mpi.SUM) diff --git a/tests/functional/Hydro/FreeExpansion/FreeExpansion-2d.py b/tests/functional/Hydro/FreeExpansion/FreeExpansion-2d.py index 131da235c..eb58918ce 100644 --- a/tests/functional/Hydro/FreeExpansion/FreeExpansion-2d.py +++ b/tests/functional/Hydro/FreeExpansion/FreeExpansion-2d.py @@ -369,7 +369,7 @@ def smooth(x,window_len=11,window='hanning'): # #------------------------------------------------------------------------------- # # If requested, write out the state in a global ordering to a file. 
# #------------------------------------------------------------------------------- -# if outputFile != "None": +# if outputFile: # outputFile = os.path.join(dataDir, outputFile) # from SpheralTestUtilities import multiSort # mprof = mpi.reduce(nodes1.mass().internalValues(), mpi.SUM) diff --git a/tests/functional/Hydro/GreshoVortex/GreshoVortex.py b/tests/functional/Hydro/GreshoVortex/GreshoVortex.py index e8c83f091..74935250f 100644 --- a/tests/functional/Hydro/GreshoVortex/GreshoVortex.py +++ b/tests/functional/Hydro/GreshoVortex/GreshoVortex.py @@ -130,7 +130,7 @@ dataDir = "dumps-greshovortex-xy", graphics = True, smooth = None, - outputFile = "None", + outputFile = None, ) assert not(boolReduceViscosity and boolCullenViscosity) @@ -547,7 +547,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(baseDir, outputFile) from SpheralTestUtilities import multiSort P = ScalarField("pressure", nodes) diff --git a/tests/functional/Hydro/GreshoVortex/GreshoVortexOverlay.py b/tests/functional/Hydro/GreshoVortex/GreshoVortexOverlay.py index 14be3257d..13c17d458 100644 --- a/tests/functional/Hydro/GreshoVortex/GreshoVortexOverlay.py +++ b/tests/functional/Hydro/GreshoVortex/GreshoVortexOverlay.py @@ -123,7 +123,6 @@ dataDir = "dumps-greshovortex-xy", graphics = True, smooth = None, - outputFile = "None", ) assert not(boolReduceViscosity and boolCullenViscosity) diff --git a/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d.py b/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d.py index 51d7ae2ed..7bce63678 100644 --- a/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d.py +++ b/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d.py @@ -146,8 +146,7 @@ redistributeStep = 500, checkRestart = False, 
dataDir = "dumps-KelvinHelmholtz-2d", - outputFile = "None", - comparisonFile = "None", + serialDump = False, #whether to dump a serial ascii file at the end for viz ) diff --git a/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d_McNally.py b/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d_McNally.py index b0f2ce737..8108fc867 100644 --- a/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d_McNally.py +++ b/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-2d_McNally.py @@ -151,8 +151,6 @@ redistributeStep = None, checkRestart = False, dataDir = "dumps-KelvinHelmholtz-2d_McNally", - outputFile = "None", - comparisonFile = "None", graphMixing = False, mixInterval = 0.02, mixFile = "MixingModeAmp.gnu", diff --git a/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-3d.py b/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-3d.py index 4091c56fa..c17ca4d32 100644 --- a/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-3d.py +++ b/tests/functional/Hydro/KelvinHelmholtz/KelvinHelmholtz-3d.py @@ -106,8 +106,7 @@ redistributeStep = 500, checkRestart = False, dataDir = "dumps-KelvinHelmholtz-3d", - outputFile = "None", - + bArtificialConduction = False, arCondAlpha = 0.5, ) diff --git a/tests/functional/Hydro/KeplerDisk/TwoMatDisk.py b/tests/functional/Hydro/KeplerDisk/TwoMatDisk.py index 26b529200..4fcd27303 100644 --- a/tests/functional/Hydro/KeplerDisk/TwoMatDisk.py +++ b/tests/functional/Hydro/KeplerDisk/TwoMatDisk.py @@ -160,8 +160,8 @@ def __call__(self, cycle, time, dt): dataDir = "twomat-%i", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, vizCycle = None, vizTime = 1.0, @@ -503,7 +503,7 @@ def __call__(self,r): else: control.step(steps) -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort P1 = ScalarField("pressure",diskNodes1) @@ -558,7 +558,7 @@ def __call__(self,r): f.write((7*"%16.12e "+"\n") 
% (ri,xi,yi,rhoi,Pi,vi,mi)) f.close() - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile,comparisonFile) diff --git a/tests/functional/Hydro/Noh/Noh-RZ.py b/tests/functional/Hydro/Noh/Noh-RZ.py index 6e5be1246..0a7c94353 100644 --- a/tests/functional/Hydro/Noh/Noh-RZ.py +++ b/tests/functional/Hydro/Noh/Noh-RZ.py @@ -118,8 +118,8 @@ restartStep = 10000, dataDirBase = "dump-rz-Noh", outputFile = "Noh-RZ.gnu", - comparisonFile = "None", - normOutputFile = "None", + comparisonFile = None, + normOutputFile = None, writeOutputLabel = True, graphics = True, @@ -453,7 +453,7 @@ for i in range(len(rho)): L1 = L1 + abs(rho[i]-rhoans[i]) L1_tot = L1 / len(rho) -# if mpi.rank == 0 and outputFile != "None": +# if mpi.rank == 0 and outputFile: # print "L1=",L1_tot,"\n" # with open("Converge.txt", "a") as myfile: # myfile.write("%s %s\n" % (nz, L1_tot)) @@ -567,7 +567,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. #--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) @@ -582,7 +582,7 @@ # failure = False # hD = [] -# if normOutputFile != "None": +# if normOutputFile: # f = open(normOutputFile, "a") # if writeOutputLabel: # f.write(("#" + 13*"%17s " + "\n") % ('"n"', @@ -605,7 +605,7 @@ # L2 = Pn.gridpnorm(2, rmin, rmax) # Linf = Pn.gridpnorm("inf", rmin, rmax) # print "\t%s \t\t%g \t\t%g \t\t%g" % (name, L1, L2, Linf) -# if normOutputFile != "None": +# if normOutputFile: # f.write((3*"%16.12e ") % (L1, L2, Linf)) # hD.append([L1,L2,Linf]) @@ -627,7 +627,7 @@ # # failure = True # # if failure: # # raise ValueError, "Error bounds violated." 
-# # if normOutputFile != "None": +# # if normOutputFile: # # f.write("\n") # # # print "%d\t %g\t %g\t %g\t %g\t %g\t %g\t %g\t %g\t %g\t %g\t %g\t %g\t" % (nz,hD[0][0],hD[1][0],hD[2][0],hD[3][0], diff --git a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py index 6c0848936..26c06cffc 100644 --- a/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py +++ b/tests/functional/Hydro/Noh/Noh-cylindrical-2d.py @@ -169,8 +169,8 @@ restartStep = 1000, checkRestart = False, dataDir = "dumps-cylindrical-Noh", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, doCompare = True, graphics = True, @@ -735,7 +735,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort P = ScalarField("pressure", nodes1) @@ -789,7 +789,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. 
#--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Hydro/Noh/Noh-planar-1d.py b/tests/functional/Hydro/Noh/Noh-planar-1d.py index 1a0097c95..a60cdf04a 100644 --- a/tests/functional/Hydro/Noh/Noh-planar-1d.py +++ b/tests/functional/Hydro/Noh/Noh-planar-1d.py @@ -174,9 +174,9 @@ restartBaseName = "Noh-planar-1d", restartFileConstructor = SiloFileIO, SPIOFileCountPerTimeslice = None, - outputFile = "None", - comparisonFile = "None", - normOutputFile = "None", + outputFile = None, + comparisonFile = None, + normOutputFile = None, writeOutputLabel = True, doCompare = True, @@ -207,15 +207,20 @@ if solid: hydroPath = "Solid" + hydroPath -dataDir = os.path.join(dataDirBase, - hydroPath, - "nPerh=%f" % nPerh, - "compatibleEnergy=%s" % compatibleEnergy, - "fhourglass=%s" % fhourglass, - "Cullen=%s" % boolCullenViscosity, - "filter=%f" % filter) -restartDir = os.path.join(dataDir, "restarts") -restartBaseName = os.path.join(restartDir, "Noh-planar-1d-%i" % nx1) +if dataDirBase: + dataDir = os.path.join(dataDirBase, + hydroPath, + "nPerh=%f" % nPerh, + "compatibleEnergy=%s" % compatibleEnergy, + "fhourglass=%s" % fhourglass, + "Cullen=%s" % boolCullenViscosity, + "filter=%f" % filter) + restartDir = os.path.join(dataDir, "restarts") + restartBaseName = os.path.join(restartDir, "Noh-planar-1d-%i" % nx1) +else: + dataDir = None + restartDir = None + restartBaseName = None dx = (x1 - x0)/nx1 @@ -705,7 +710,7 @@ for i in range(len(rho)): L1 = L1 + abs(rho[i]-rhoans[i]) L1_tot = L1 / len(rho) -if mpi.rank == 0 and outputFile != "None": +if mpi.rank == 0 and outputFile: print("L1=",L1_tot,"\n") with open("Converge.txt", "a") as myfile: myfile.write("%s %s\n" % (nx1, L1_tot)) @@ -782,7 +787,7 @@ 
#------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mof = mortonOrderIndices(db) @@ -819,7 +824,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. #--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) @@ -834,7 +839,7 @@ print("Quantity \t\tL1 \t\t\t\tL2 \t\t\t\tLinf") failure = False - if normOutputFile != "None": + if normOutputFile: f = open(normOutputFile, "a") if writeOutputLabel: f.write(("#" + 13*"%17s " + "\n") % ('"nx"', @@ -856,7 +861,7 @@ L2 = Pn.gridpnorm(2, rmin, rmax) Linf = Pn.gridpnorm("inf", rmin, rmax) print(f"{name}\t\t{L1} \t\t{L2} \t\t{Linf}") - if normOutputFile != "None": + if normOutputFile: f.write((3*"%16.12e ") % (L1, L2, Linf)) if checkError and not (np.allclose(L1, LnormRef[hydroType][name]["L1"], tol, tol) and @@ -865,7 +870,7 @@ print("Failing Lnorm tolerance for ", name, (L1, L2, Linf), LnormRef[hydroType][name]) failure = True - if normOutputFile != "None": + if normOutputFile: f.write("\n") if checkError and failure: diff --git a/tests/functional/Hydro/Noh/Noh-shear-2d.py b/tests/functional/Hydro/Noh/Noh-shear-2d.py index 848ad2e93..b099d8892 100644 --- a/tests/functional/Hydro/Noh/Noh-shear-2d.py +++ b/tests/functional/Hydro/Noh/Noh-shear-2d.py @@ -115,28 +115,33 @@ dataRoot = "dumps-shearingNoh-2d", graphics = True, - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, ) assert 
not(boolReduceViscosity and boolCullenViscosity) hydroType = hydroType.upper() -dataDir = os.path.join(dataRoot, - hydroType, - Qconstructor.__name__, - "basaraShearCorrection=%s_Qlimiter=%s" % (balsaraCorrection, Qlimiter), - "nperh=%4.2f" % nPerh, - "XSPH=%s" % XSPH, - "densityUpdate=%s" % densityUpdate, - "compatibleEnergy=%s" % compatibleEnergy, - "Cullen=%s" % boolCullenViscosity, - "gradhCorrection=%s" % gradhCorrection, - "nx=%i_ny=%i" % (nx, ny)) -restartDir = os.path.join(dataDir, "restarts") -vizDir = os.path.join(dataDir, "visit") -restartBaseName = os.path.join(restartDir, "Noh-shear-2d-%ix%i" % (nx, ny)) +if dataRoot: + dataDir = os.path.join(dataRoot, + hydroType, + Qconstructor.__name__, + "basaraShearCorrection=%s_Qlimiter=%s" % (balsaraCorrection, Qlimiter), + "nperh=%4.2f" % nPerh, + "XSPH=%s" % XSPH, + "densityUpdate=%s" % densityUpdate, + "compatibleEnergy=%s" % compatibleEnergy, + "Cullen=%s" % boolCullenViscosity, + "gradhCorrection=%s" % gradhCorrection, + "nx=%i_ny=%i" % (nx, ny)) + restartDir = os.path.join(dataDir, "restarts") + vizDir = os.path.join(dataDir, "visit") + restartBaseName = os.path.join(restartDir, "Noh-shear-2d-%ix%i" % (nx, ny)) +else: + restartDir = None + vizDir = None + restartBaseName = None if vizTime is None and vizCycle is None: vizBaseName = None else: @@ -495,7 +500,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort P = ScalarField("pressure", nodes1) @@ -538,7 +543,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. 
#--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Hydro/Noh/Noh-spherical-1d.py b/tests/functional/Hydro/Noh/Noh-spherical-1d.py index 5d4d2b95b..c552dace7 100644 --- a/tests/functional/Hydro/Noh/Noh-spherical-1d.py +++ b/tests/functional/Hydro/Noh/Noh-spherical-1d.py @@ -133,9 +133,9 @@ restartStep = 10000, dataDirBase = "dumps-spherical-Noh", restartBaseName = "Noh-spherical-1d", - outputFile = "None", - comparisonFile = "None", - normOutputFile = "None", + outputFile = None, + comparisonFile = None, + normOutputFile = None, writeOutputLabel = True, # Parameters for the test acceptance., @@ -506,7 +506,7 @@ for i in range(len(rho)): L1 = L1 + abs(rho[i]-rhoans[i]) L1_tot = L1 / len(rho) -if mpi.rank == 0 and outputFile != "None": +if mpi.rank == 0 and outputFile: print("L1=",L1_tot,"\n") with open("Converge.txt", "a") as myfile: myfile.write("%s %s\n" % (nr, L1_tot)) @@ -583,7 +583,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mof = mortonOrderIndices(db) @@ -620,7 +620,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. 
#--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) @@ -635,7 +635,7 @@ failure = False hD = [] - if normOutputFile != "None": + if normOutputFile: f = open(normOutputFile, "a") if writeOutputLabel: f.write(("#" + 13*"%17s " + "\n") % ('"nx"', @@ -658,7 +658,7 @@ L2 = Pn.gridpnorm(2, rmin, rmax) Linf = Pn.gridpnorm("inf", rmin, rmax) print("\t%s \t\t%g \t\t%g \t\t%g" % (name, L1, L2, Linf)) - if normOutputFile != "None": + if normOutputFile: f.write((3*"%16.12e ") % (L1, L2, Linf)) hD.append([L1,L2,Linf]) @@ -697,7 +697,7 @@ Linf, Linfexpect)) failure = True - if normOutputFile != "None": + if normOutputFile: f.write("\n") if failure: diff --git a/tests/functional/Hydro/Noh/Noh-spherical-3d.py b/tests/functional/Hydro/Noh/Noh-spherical-3d.py index 544d90e66..715817ba1 100644 --- a/tests/functional/Hydro/Noh/Noh-spherical-3d.py +++ b/tests/functional/Hydro/Noh/Noh-spherical-3d.py @@ -88,7 +88,7 @@ XSPH = False, epsilonTensile = 0.0, nTensile = 8, - filter = 0.0, + xfilter = 0.0, IntegratorConstructor = CheapSynchronousRK2Integrator, goalTime = 0.6, @@ -123,7 +123,7 @@ checkRestart = False, dataDir = "dumps-spherical-Noh", outputFile = "Noh_spherical_profiles.gnu", - comparisonFile = "None", + comparisonFile = None, doCompare = True, graphics = True, @@ -165,7 +165,7 @@ "nPerh=%f" % nPerh, "compatibleEnergy=%s" % compatibleEnergy, "Cullen=%s" % boolCullenViscosity, - "filter=%f" % filter, + "xfilter=%f" % xfilter, "nx=%i_ny=%i_nz=%i" % (nx, ny, nz)) restartDir = os.path.join(dataDir, "restarts") restartBaseName = os.path.join(restartDir, "Noh-spherical-3d-%ix%ix%i" % (nx, ny, nz)) @@ -280,7 +280,7 @@ elif crksph: hydro = CRKSPH(dataBase = db, W = WT, - filter = filter, + filter = xfilter, cfl = cfl, compatibleEnergyEvolution = compatibleEnergy, XSPH = XSPH, @@ -355,7 +355,7 @@ 
elif psph: hydro = PSPH(dataBase = db, W = WT, - filter = filter, + filter = xfilter, cfl = cfl, compatibleEnergyEvolution = compatibleEnergy, evolveTotalEnergy = evolveTotalEnergy, @@ -368,7 +368,7 @@ else: hydro = SPH(dataBase = db, W = WT, - filter = filter, + filter = xfilter, cfl = cfl, compatibleEnergyEvolution = compatibleEnergy, evolveTotalEnergy = evolveTotalEnergy, @@ -643,7 +643,7 @@ rmaxnorm = 0.35 rminnorm = 0.05 -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort P = ScalarField("pressure", nodes1) @@ -704,7 +704,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. #--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Hydro/RayleighTaylor/Hydrostatic-1d.py b/tests/functional/Hydro/RayleighTaylor/Hydrostatic-1d.py index 31925b0e7..16a0e13a3 100644 --- a/tests/functional/Hydro/RayleighTaylor/Hydrostatic-1d.py +++ b/tests/functional/Hydro/RayleighTaylor/Hydrostatic-1d.py @@ -97,9 +97,7 @@ def __call__(self, r): redistributeStep = 500, checkRestart = False, dataDir = "dumps-Rayleigh-Taylor-1d_hopkins", - outputFile = "None", - comparisonFile = "None", - + serialDump = False, #whether to dump a serial ascii file at the end for viz graphics = True, diff --git a/tests/functional/Hydro/RayleighTaylor/RT-2d.py b/tests/functional/Hydro/RayleighTaylor/RT-2d.py index c0203ce2e..652d1ba16 100644 --- a/tests/functional/Hydro/RayleighTaylor/RT-2d.py +++ b/tests/functional/Hydro/RayleighTaylor/RT-2d.py @@ -164,8 +164,8 @@ def __call__(self, r): redistributeStep = 50000, checkRestart = False, dataDir = "dumps-Rayleigh-Taylor-2d", - outputFile = "None", - comparisonFile = 
"None", + outputFile = None, + comparisonFile = None, serialDump = False, #whether to dump a serial ascii file at the end for viz ) diff --git a/tests/functional/Hydro/RayleighTaylor/RT-2d_Hopkins.py b/tests/functional/Hydro/RayleighTaylor/RT-2d_Hopkins.py index 3175acd48..01c7d7b69 100644 --- a/tests/functional/Hydro/RayleighTaylor/RT-2d_Hopkins.py +++ b/tests/functional/Hydro/RayleighTaylor/RT-2d_Hopkins.py @@ -157,7 +157,7 @@ def __call__(self, r): sampleFreq = 20, dataDir = "dumps-Rayleigh-Taylor-2d_hopkins", outputFile = "RT_Hopkins.txt", - comparisonFile = "None", + comparisonFile = None, serialDump = False, #whether to dump a serial ascii file at the end for viz useVoronoiOutput = False, diff --git a/tests/functional/Hydro/RayleighTaylor/RT-const-rho.py b/tests/functional/Hydro/RayleighTaylor/RT-const-rho.py index 57edc18a6..e13ce6200 100644 --- a/tests/functional/Hydro/RayleighTaylor/RT-const-rho.py +++ b/tests/functional/Hydro/RayleighTaylor/RT-const-rho.py @@ -119,8 +119,8 @@ def __call__(self, r): redistributeStep = 500, checkRestart = False, dataDir = "dumps-Rayleigh-Taylor-2d-constRho", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, serialDump = False, #whether to dump a serial ascii file at the end for viz diff --git a/tests/functional/Hydro/Riemann/Riemann.py b/tests/functional/Hydro/Riemann/Riemann.py index 9ecbdf04f..15d824ebe 100644 --- a/tests/functional/Hydro/Riemann/Riemann.py +++ b/tests/functional/Hydro/Riemann/Riemann.py @@ -87,7 +87,7 @@ restoreCycle = -1, restartStep = 10000, dataDirBase = "dumps-", - outputFile = "None", + outputFile = None, checkRestart = False, graphics = True, @@ -515,7 +515,7 @@ def createList(x): rmax = x2 if mpi.rank == 0: multiSort(mo, xprof, rhoprof, Pprof, vprof, epsprof, hprof) - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 19*"'%s' " + "\n") % ("x", "rho", "P", "v", "eps", "A", 
"h", "mo", diff --git a/tests/functional/Hydro/Sedov/Sedov-2d-ratio.py b/tests/functional/Hydro/Sedov/Sedov-2d-ratio.py index 20322965c..62dd5d5bf 100644 --- a/tests/functional/Hydro/Sedov/Sedov-2d-ratio.py +++ b/tests/functional/Hydro/Sedov/Sedov-2d-ratio.py @@ -104,7 +104,7 @@ useVoronoiOutput = False, clearDirectories = False, dataDirBase = "dumps-cylindrical-Sedov", - outputFile = "None", + outputFile = None, serialDump=True, xlmin = 0.4, @@ -527,7 +527,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None" and mpi.rank == 0: +if outputFile and mpi.rank == 0: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 17*"%16s " + "\n") % ("r", "x", "y", "rho", "m", "P", "v", "eps", "A", "hr", "ht", diff --git a/tests/functional/Hydro/Sedov/Sedov-RZ.py b/tests/functional/Hydro/Sedov/Sedov-RZ.py index ec41a93d8..faa1948c8 100644 --- a/tests/functional/Hydro/Sedov/Sedov-RZ.py +++ b/tests/functional/Hydro/Sedov/Sedov-RZ.py @@ -89,8 +89,8 @@ checkEnergy = False, restoreCycle = -1, restartStep = 10000, - comparisonFile = "None", - normOutputFile = "None", + comparisonFile = None, + normOutputFile = None, writeOutputLabel = True, graphics = True, @@ -460,7 +460,7 @@ for i in range(len(rho)): L1 = L1 + abs(rho[i]-rhoans[i]) L1_tot = L1 / len(rho) -# if mpi.rank == 0 and outputFile != "None": +# if mpi.rank == 0 and outputFile: # print "L1=",L1_tot,"\n" # with open("Converge.txt", "a") as myfile: # myfile.write("%s %s\n" % (nz, L1_tot)) @@ -513,7 +513,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mof = mortonOrderIndices(db) @@ -551,7 +551,7 @@ # #--------------------------------------------------------------------------- # # Also we can optionally compare the current results with another file. # #--------------------------------------------------------------------------- - # if comparisonFile != "None": + # if comparisonFile: # comparisonFile = os.path.join(dataDir, comparisonFile) # import filecmp # assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Hydro/Sedov/Sedov-cylindrical-2d.py b/tests/functional/Hydro/Sedov/Sedov-cylindrical-2d.py index 29a6472f4..8e54c4988 100644 --- a/tests/functional/Hydro/Sedov/Sedov-cylindrical-2d.py +++ b/tests/functional/Hydro/Sedov/Sedov-cylindrical-2d.py @@ -115,7 +115,7 @@ useVoronoiOutput = False, clearDirectories = False, dataDirBase = "dumps-cylindrical-Sedov", - outputFile = "None", + outputFile = None, serialDump=True, ) @@ -590,7 +590,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None" and mpi.rank == 0: +if outputFile and mpi.rank == 0: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 17*"%16s " + "\n") % ("r", "x", "y", "rho", "m", "P", "v", "eps", "A", "hr", "ht", diff --git a/tests/functional/Hydro/Sedov/Sedov-planar-1d.py b/tests/functional/Hydro/Sedov/Sedov-planar-1d.py index 4f08c37d4..894a456cf 100644 --- a/tests/functional/Hydro/Sedov/Sedov-planar-1d.py +++ b/tests/functional/Hydro/Sedov/Sedov-planar-1d.py @@ -96,7 +96,7 @@ graphics = True, clearDirectories = False, dataDirBase = "dumps-planar-Sedov", - outputFile = "None", + outputFile = None, ) if smallPressure: @@ -454,7 +454,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None" and mpi.rank == 0: +if outputFile and mpi.rank == 0: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 14*"%15s " + "\n") % ("r", "x", "rho", "P", "v", "eps", "A", "hr", diff --git a/tests/functional/Hydro/Sedov/Sedov-spherical-1d.py b/tests/functional/Hydro/Sedov/Sedov-spherical-1d.py index 079e18dfb..da8ad66c8 100644 --- a/tests/functional/Hydro/Sedov/Sedov-spherical-1d.py +++ b/tests/functional/Hydro/Sedov/Sedov-spherical-1d.py @@ -66,7 +66,7 @@ graphics = True, clearDirectories = True, dataDirBase = "dumps-spherical-Sedov", - outputFile = "None", + outputFile = None, ) if smallPressure: @@ -332,7 +332,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None" and mpi.rank == 0: +if outputFile and mpi.rank == 0: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 16*"%15s " + "\n") % ("r", "x", "y", "z", "rho", "m", "P", "v", "eps", "A", diff --git a/tests/functional/Hydro/Sedov/Sedov-spherical-3d.py b/tests/functional/Hydro/Sedov/Sedov-spherical-3d.py index ac15f2855..a62e231e2 100644 --- a/tests/functional/Hydro/Sedov/Sedov-spherical-3d.py +++ b/tests/functional/Hydro/Sedov/Sedov-spherical-3d.py @@ -110,7 +110,7 @@ graphics = True, clearDirectories = False, dataDirBase = "dumps-spherical-Sedov", - outputFile = "None", + outputFile = None, ) if smallPressure: @@ -509,7 +509,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None" and mpi.rank == 0: +if outputFile and mpi.rank == 0: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 16*"%15s " + "\n") % ("r", "x", "y", "z", "rho", "m", "P", "v", "eps", "A", diff --git a/tests/functional/Hydro/ShockBubble/ShockBubble-Variant-2d.py b/tests/functional/Hydro/ShockBubble/ShockBubble-Variant-2d.py index 197b2b6d6..43fbbc9a4 100644 --- a/tests/functional/Hydro/ShockBubble/ShockBubble-Variant-2d.py +++ b/tests/functional/Hydro/ShockBubble/ShockBubble-Variant-2d.py @@ -98,7 +98,7 @@ checkRestart = False, dataDir = "dumps-bubbleShock-variant-2d", vizName = "ShockBubble-variant-2d", - outputFile = "None", + outputFile = None, ) airEnergy = airPressure/((airGamma - 1.0)*airDensity) diff --git a/tests/functional/Hydro/Sod/Sod-RZ.py b/tests/functional/Hydro/Sod/Sod-RZ.py index 0a54a8258..c14368975 100644 --- a/tests/functional/Hydro/Sod/Sod-RZ.py +++ b/tests/functional/Hydro/Sod/Sod-RZ.py @@ -114,9 +114,9 @@ 
checkEnergy = False, restoreCycle = -1, restartStep = 100, - outputFile = "None", - comparisonFile = "None", - normOutputFile = "None", + outputFile = None, + comparisonFile = None, + normOutputFile = None, writeOutputLabel = True, graphics = True, diff --git a/tests/functional/Hydro/Sod/Sod-planar-1d-WaterGas.py b/tests/functional/Hydro/Sod/Sod-planar-1d-WaterGas.py index 471c3c3e2..95a3f8cb1 100644 --- a/tests/functional/Hydro/Sod/Sod-planar-1d-WaterGas.py +++ b/tests/functional/Hydro/Sod/Sod-planar-1d-WaterGas.py @@ -130,7 +130,7 @@ restartStep = 10000, dataDirBase = "dumps-Sod-planar", restartBaseName = "Sod-planar-1d-restart", - outputFile = "None", + outputFile = None, checkRestart = False, graphics = True, ) @@ -610,7 +610,7 @@ def createList(x): rmax = x2 if mpi.rank == 0: multiSort(mo, xprof, rhoprof, Pprof, vprof, epsprof, hprof) - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 17*"'%s' " + "\n") % ("x", "rho", "P", "v", "eps", "h", "mo", diff --git a/tests/functional/Hydro/Sod/Sod-planar-1d.py b/tests/functional/Hydro/Sod/Sod-planar-1d.py index f87859be0..771d2e2cc 100644 --- a/tests/functional/Hydro/Sod/Sod-planar-1d.py +++ b/tests/functional/Hydro/Sod/Sod-planar-1d.py @@ -133,7 +133,7 @@ restartStep = 10000, dataDirBase = "dumps-Sod-planar", restartBaseName = "Sod-planar-1d-restart", - outputFile = "None", + outputFile = None, checkRestart = False, graphics = True, @@ -739,7 +739,7 @@ def createList(x): rmax = x2 if mpi.rank == 0: multiSort(mo, xprof, rhoprof, Pprof, vprof, epsprof, hprof) - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 19*"'%s' " + "\n") % ("x", "rho", "P", "v", "eps", "A", "h", "mo", diff --git a/tests/functional/Hydro/Sod/Sod-planar-2d-WaterGas.py b/tests/functional/Hydro/Sod/Sod-planar-2d-WaterGas.py index 9a318fb7a..d2e4f3dcf 100644 --- 
a/tests/functional/Hydro/Sod/Sod-planar-2d-WaterGas.py +++ b/tests/functional/Hydro/Sod/Sod-planar-2d-WaterGas.py @@ -129,7 +129,7 @@ restartStep = 100, dataDirBase = "dumps-Sod-planar-2d", restartBaseName = "Sod-planar-2d-restart", - outputFile = "None", + outputFile = None, checkRestart = False, vizCycle = None, @@ -623,7 +623,7 @@ def createList(x): rmin = x0 rmax = x2 if mpi.rank == 0: - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("#" + 10*" '%s'" + "\n") % ("x", "rho", "P", "v", "eps", "h", diff --git a/tests/functional/Hydro/Sod/Sod-planar-2d.py b/tests/functional/Hydro/Sod/Sod-planar-2d.py index 50e37cd62..dba9add79 100644 --- a/tests/functional/Hydro/Sod/Sod-planar-2d.py +++ b/tests/functional/Hydro/Sod/Sod-planar-2d.py @@ -108,7 +108,7 @@ restartStep = 100, dataDirBase = "dumps-Sod-planar-2d", restartBaseName = "Sod-planar-2d-restart", - outputFile = "None", + outputFile = None, checkRestart = False, vizCycle = None, @@ -709,7 +709,7 @@ def createList(x): rmin = x0 rmax = x2 if mpi.rank == 0: - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("#" + 12*" '%s'" + "\n") % ("x", "rho", "P", "v", "eps", "A", "h", diff --git a/tests/functional/Hydro/Sod/Sod-planar-3d.py b/tests/functional/Hydro/Sod/Sod-planar-3d.py index 80480592c..5b3e36d37 100644 --- a/tests/functional/Hydro/Sod/Sod-planar-3d.py +++ b/tests/functional/Hydro/Sod/Sod-planar-3d.py @@ -126,7 +126,7 @@ restartStep = 100, dataDirBase = "dumps-Sod-planar-3d", restartBaseName = "Sod-planar-3d-restart", - outputFile = "None", + outputFile = None, checkRestart = False, vizCycle = None, @@ -677,7 +677,7 @@ def createList(x): rmin = x0 rmax = x2 if mpi.rank == 0: - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("#" + 14*" '%s'" + "\n") % ("x", "rho", "P", "vx", "vy", "vz", 
"eps", "A", "h", diff --git a/tests/functional/Hydro/Sod/Sod-spherical-1d.py b/tests/functional/Hydro/Sod/Sod-spherical-1d.py index f229fcd41..6b7db95a1 100644 --- a/tests/functional/Hydro/Sod/Sod-spherical-1d.py +++ b/tests/functional/Hydro/Sod/Sod-spherical-1d.py @@ -116,7 +116,7 @@ restartStep = 10000, dataDirBase = "dumps-Sod-spherical", restartBaseName = "Sod-spherical-1d-restart", - outputFile = "None", + outputFile = None, checkRestart = False, graphics = True, @@ -601,7 +601,7 @@ def createList(x): # rmax = x2 # if mpi.rank == 0: # multiSort(mo, xprof, rhoprof, Pprof, vprof, epsprof, hprof) -# if outputFile != "None": +# if outputFile: # outputFile = os.path.join(dataDir, outputFile) # f = open(outputFile, "w") # f.write(("# " + 19*"'%s' " + "\n") % ("x", "rho", "P", "v", "eps", "A", "h", "mo", diff --git a/tests/functional/Hydro/Sod/convSod-planar-1d.py b/tests/functional/Hydro/Sod/convSod-planar-1d.py index c980a07da..4c000457b 100644 --- a/tests/functional/Hydro/Sod/convSod-planar-1d.py +++ b/tests/functional/Hydro/Sod/convSod-planar-1d.py @@ -77,7 +77,7 @@ restartStep = 200, dataDirBase = "Sod-planar-1d", restartBaseName = "Sod-planar-1d-restart", - outputFile = "None", + outputFile = None, graphics = "gnu", serialDump = False, #whether to dump a serial ascii file at the end for viz @@ -466,7 +466,7 @@ def createList(x): rmax = x2 if mpi.rank == 0: multiSort(mo, xprof, rhoprof, Pprof, vprof, epsprof, hprof) - if outputFile != "None": + if outputFile: outputFile = os.path.join(dataDir, outputFile) f = open(outputFile, "w") f.write(("# " + 17*"'%s' " + "\n") % ("x", "rho", "P", "v", "eps", "h", "mo", diff --git a/tests/functional/Hydro/SphericalCollapse/SphericalCollapse.py b/tests/functional/Hydro/SphericalCollapse/SphericalCollapse.py index ab5cdd428..3e06cf8e0 100644 --- a/tests/functional/Hydro/SphericalCollapse/SphericalCollapse.py +++ b/tests/functional/Hydro/SphericalCollapse/SphericalCollapse.py @@ -107,7 +107,7 @@ graphics = True, 
clearDirectories = False, dataRoot = "dumps-spherical-collapse", - outputFile = "None", + outputFile = None, ) diff --git a/tests/functional/Hydro/Turbulence/Stir-3d.py b/tests/functional/Hydro/Turbulence/Stir-3d.py index 53e1d963b..dbb7e5c95 100644 --- a/tests/functional/Hydro/Turbulence/Stir-3d.py +++ b/tests/functional/Hydro/Turbulence/Stir-3d.py @@ -93,8 +93,8 @@ redistributeStep = 500, checkRestart = False, dataDir = "stir-3d", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, serialDump = False, #whether to dump a serial ascii file at the end for viz ) diff --git a/tests/functional/Hydro/YeeVortex/YeeVortex.py b/tests/functional/Hydro/YeeVortex/YeeVortex.py index 0f7b51829..af8af494a 100644 --- a/tests/functional/Hydro/YeeVortex/YeeVortex.py +++ b/tests/functional/Hydro/YeeVortex/YeeVortex.py @@ -163,7 +163,7 @@ def __call__(self, r): dataDir = "dumps-yeevortex-xy", graphics = True, smooth = False, - outputFileBase = ".out", + outputFile = ".out", convergenceFileBase = "xstaglattice_converge.txt", ) @@ -598,7 +598,7 @@ def __call__(self, r): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(baseDir, outputFile) from SpheralTestUtilities import multiSort P = ScalarField("pressure", nodes) diff --git a/tests/functional/Interpolation/one-mass.py b/tests/functional/Interpolation/one-mass.py index 81f13fcfc..ae56e2c19 100644 --- a/tests/functional/Interpolation/one-mass.py +++ b/tests/functional/Interpolation/one-mass.py @@ -94,8 +94,8 @@ restartStep = 10000, dataDir = "dumps-planar", restartBaseName = "Noh-planar-1d", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, graphics = True, serialDump = False #whether to dump a serial ascii file at the end for viz @@ -354,7 +354,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mof = mortonOrderIndices(db) @@ -391,7 +391,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. 
#--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Interpolation/one-node.py b/tests/functional/Interpolation/one-node.py index f3fab14b2..9dfcef329 100644 --- a/tests/functional/Interpolation/one-node.py +++ b/tests/functional/Interpolation/one-node.py @@ -94,8 +94,8 @@ restartStep = 10000, dataDir = "dumps-planar", restartBaseName = "Noh-planar-1d", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, graphics = True, serialDump = False #whether to dump a serial ascii file at the end for viz @@ -187,7 +187,7 @@ #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mof = mortonOrderIndices(db) @@ -224,7 +224,7 @@ #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. 
#--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Interpolation/two-nodes.py b/tests/functional/Interpolation/two-nodes.py index dd16dae1b..42c46d19b 100644 --- a/tests/functional/Interpolation/two-nodes.py +++ b/tests/functional/Interpolation/two-nodes.py @@ -82,8 +82,8 @@ restartStep = 10000, dataDir = "dumps-2p", restartBaseName = "2p", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, graphics = True, serialDump = True #whether to dump a serial ascii file at the end for viz diff --git a/tests/functional/Porosity/PlanarCompaction/PlanarCompaction-1d.py b/tests/functional/Porosity/PlanarCompaction/PlanarCompaction-1d.py index 725a83be1..6f2b2e314 100644 --- a/tests/functional/Porosity/PlanarCompaction/PlanarCompaction-1d.py +++ b/tests/functional/Porosity/PlanarCompaction/PlanarCompaction-1d.py @@ -106,8 +106,8 @@ dataDirBase = "dumps-PlanarCompaction-1d", checkError = False, checkRestart = False, - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, # Parameters for the test acceptance., tol = 1.0e-5, diff --git a/tests/functional/Strength/CollidingPlates/CollidingPlates-1d.py b/tests/functional/Strength/CollidingPlates/CollidingPlates-1d.py index db0827488..14abf8ee3 100644 --- a/tests/functional/Strength/CollidingPlates/CollidingPlates-1d.py +++ b/tests/functional/Strength/CollidingPlates/CollidingPlates-1d.py @@ -83,8 +83,8 @@ clearDirectories = False, referenceFile = "Reference/CollidingPlates-1d-reference-compatible-20220422.txt", dataDirBase = "dumps-CollidingPlates-1d", - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, ) if crksph: @@ -350,7 +350,7 @@ 
#------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) pos = state.vectorFields(HydroFieldNames.position) @@ -384,7 +384,7 @@ #--------------------------------------------------------------------------- # Check the floating values for the state against reference data. #--------------------------------------------------------------------------- - if referenceFile != "None": + if referenceFile: import filearraycmp as fcomp assert fcomp.filearraycmp(outputFile, referenceFile, testtol, testtol) print("Floating point comparison test passed.") @@ -393,7 +393,7 @@ # Also we can optionally compare the current results with another file for # bit level consistency. #--------------------------------------------------------------------------- - if comparisonFile != "None" and BuildData.cxx_compiler_id != "IntelLLVM": + if comparisonFile and BuildData.cxx_compiler_id != "IntelLLVM": import filecmp print("Compare files : %s <---> %s" % (outputFile, comparisonFile)) assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Strength/Piston/Piston.py b/tests/functional/Strength/Piston/Piston.py index 359dea456..acc0ddcec 100644 --- a/tests/functional/Strength/Piston/Piston.py +++ b/tests/functional/Strength/Piston/Piston.py @@ -129,7 +129,7 @@ restartStep = 10000, dataDirBase = "dumps-Piston-1d-Cu", restartBaseName = "Piston-1d-Cu-restart", - outputFile = "None", + outputFile = None, checkRestart = False, graphics = True, ) diff --git a/tests/functional/Strength/PlateImpact/PlateImpact-1d.py b/tests/functional/Strength/PlateImpact/PlateImpact-1d.py index 0bc8a2d87..13cd0f8c7 100644 --- a/tests/functional/Strength/PlateImpact/PlateImpact-1d.py +++ 
b/tests/functional/Strength/PlateImpact/PlateImpact-1d.py @@ -93,8 +93,8 @@ # Should we run in domain independent mode, and if so should we check # for domain independence? domainIndependent = False, - outputFile = "None", - comparisonFile = "None", + outputFile = None, + comparisonFile = None, ) Sapphire1Range = (0.0, Sapphire1Thickness) @@ -639,7 +639,7 @@ def restoreState(self, file, path): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(restartDir, outputFile) pos = db.fluidPosition rho = db.fluidMassDensity @@ -672,7 +672,7 @@ def restoreState(self, file, path): #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. #--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(restartDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Strength/PlateImpact/TP106-1d.py b/tests/functional/Strength/PlateImpact/TP106-1d.py index 4528edd81..b69dd48eb 100644 --- a/tests/functional/Strength/PlateImpact/TP106-1d.py +++ b/tests/functional/Strength/PlateImpact/TP106-1d.py @@ -394,7 +394,7 @@ def tp106tracersample(nodes, indices): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: from SpheralTestUtilities import multiSort state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) diff --git a/tests/functional/Strength/Verney/Verney-2d.py b/tests/functional/Strength/Verney/Verney-2d.py index 200efc1c0..a409e69f7 100644 --- a/tests/functional/Strength/Verney/Verney-2d.py +++ b/tests/functional/Strength/Verney/Verney-2d.py @@ -428,7 +428,7 @@ def verneySample(nodes, indices): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: from SpheralTestUtilities import multiSort state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) diff --git a/tests/functional/Strength/Verney/Verney-3d.py b/tests/functional/Strength/Verney/Verney-3d.py index e80582190..08cdec3e7 100644 --- a/tests/functional/Strength/Verney/Verney-3d.py +++ b/tests/functional/Strength/Verney/Verney-3d.py @@ -437,7 +437,7 @@ def verneySample(nodes, indices): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: from SpheralTestUtilities import multiSort state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) diff --git a/tests/functional/Strength/Verney/Verney-RZ.py b/tests/functional/Strength/Verney/Verney-RZ.py index 6c0dea1a5..6e8d830d6 100644 --- a/tests/functional/Strength/Verney/Verney-RZ.py +++ b/tests/functional/Strength/Verney/Verney-RZ.py @@ -430,7 +430,7 @@ def verneySample(nodes, indices): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. #------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: from SpheralTestUtilities import multiSort state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) diff --git a/tests/functional/Strength/Verney/Verney-spherical.py b/tests/functional/Strength/Verney/Verney-spherical.py index 846af0e5a..f4efee7e5 100644 --- a/tests/functional/Strength/Verney/Verney-spherical.py +++ b/tests/functional/Strength/Verney/Verney-spherical.py @@ -121,7 +121,7 @@ def __call__(self, x): clearDirectories = False, dataDirBase = "dumps-Verney-Be-R", outputFile = "Verney-Be-R.gnu", - comparisonFile = "None", + comparisonFile = None, # Testing checkRestart = False, @@ -432,7 +432,7 @@ def verneySample(nodes, indices): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: from SpheralTestUtilities import multiSort state = State(db, integrator.physicsPackages()) outputFile = os.path.join(dataDir, outputFile) @@ -475,7 +475,7 @@ def verneySample(nodes, indices): #--------------------------------------------------------------------------- # Also we can optionally compare the current results with another file. #--------------------------------------------------------------------------- - if comparisonFile != "None": + if comparisonFile: comparisonFile = os.path.join(dataDir, comparisonFile) import filecmp assert filecmp.cmp(outputFile, comparisonFile) diff --git a/tests/functional/Surfaces/1d.py b/tests/functional/Surfaces/1d.py index 424723c69..9faab7d79 100644 --- a/tests/functional/Surfaces/1d.py +++ b/tests/functional/Surfaces/1d.py @@ -352,7 +352,7 @@ def smooth(x,window_len=11,window='hanning'): #------------------------------------------------------------------------------- # If requested, write out the state in a global ordering to a file. 
#------------------------------------------------------------------------------- -if outputFile != "None": +if outputFile: outputFile = os.path.join(dataDir, outputFile) from SpheralTestUtilities import multiSort mprof = mpi.reduce(nodes1.mass().internalValues(), mpi.SUM) diff --git a/tests/integration.ats b/tests/integration.ats index 8ad8b0e73..9f829b45b 100644 --- a/tests/integration.ats +++ b/tests/integration.ats @@ -9,6 +9,9 @@ glue(gsph = False) glue(svph = False) glue(independent = True) +# Fail test to make sure tests are working +source("unit/Utilities/testFails.py") + # Geometry unit tests source("unit/Geometry/testVector.py") source("unit/Geometry/testTensor.py") diff --git a/tests/performance.py.in b/tests/performance.py.in index b34251333..d00a10ea5 100644 --- a/tests/performance.py.in +++ b/tests/performance.py.in @@ -89,7 +89,7 @@ regions = ["CheapRK2", timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks # General input for all Noh tests -gen_noh_inps = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --filter 0.0 "+\ +gen_noh_inps = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --xfilter 0.0 "+\ "--nPerh 2.01 --graphics False --clearDirectories False --doCompare False "+\ "--dataDir None --vizTime None --vizCycle None" diff --git a/tests/unit/CRKSPH/testConsistency.py b/tests/unit/CRKSPH/testConsistency.py index 229f40aeb..bf62113b6 100644 --- a/tests/unit/CRKSPH/testConsistency.py +++ b/tests/unit/CRKSPH/testConsistency.py @@ -74,12 +74,11 @@ graphics = True, graphBij = False, plotKernels = False, - outputFile = "None", + outputFile = None, plotSPH = True, dataCut = False, dataCutMin = 0.0, dataCutMax = 1.0, - outfile = "None", ) assert testCase in ("linear", "quadratic", "cubic", "step") @@ -843,13 +842,13 @@ Pnorm(errxBRKSPHIV, xans).pnorm(1))) print("Maximum errors: CRKSPH = %g, RKSPH I = %g, RKSPH II = %g, RKSPH IV = %g, RKSPH V = %g, SPH = %g, BCRKSPH = %g, BRKSPHII = %g, BRKSPHIV = %g" % (maxaxCRKSPHerror, 
maxaxRKSPHIerror, maxaxRKSPHIIerror, maxaxRKSPHIVerror, maxaxRKSPHVerror, maxaxSPHerror, maxaxBCRKSPHerror, maxaxBRKSPHIIerror, maxaxBRKSPHIVerror)) print("L1 Interpolation Error RK = %g, Max err = %g, L1 Derivative Error Rk = %g, Max err = %g" % (Pnorm(errfRK, xans).pnorm(1),maxfRKerror, Pnorm(errgfRK, xans).pnorm(1),maxgfRKerror)) -if outfile != "None": - fl = open(outfile+".ascii", "w") +if outputFile: + fl = open(outputFile+".ascii", "w") fl.write(("# " + 8*"%15s \t " + "\n") % ("x", "Dv/Dt", "CRK", "RK Type 1", "SPH", "CRK Error (difference)", "RK Error", "SPH Error")) for i in range(len(xidx)): fl.write((8*"%16.12e " + "\n") % (xans[i], axans[i], accCRKSPH[i], accRKSPHI[i], accSPH[i], errxCRKSPH[i], errxRKSPHI[i], errxSPH[i])) fl.close() - fl = open(outfile+"_interpolate.ascii", "w") + fl = open(outputFile+"_interpolate.ascii", "w") fl.write(("# " + 7*"%15s \t" + "\n") % ("x", "P", "grad P", "RK (P estimate)", "RK (grad P estimate)", "SPH (P estimate)", "SPH (grad P estimate)")) for i in range(len(xidx)): fl.write((7*"%16.12e " + "\n") % (xans[i], f[i], gf[i], fRK[i], gfRK[i], fSPH[i], gfSPH[i])) diff --git a/tests/unit/CRKSPH/testInterpolation.py b/tests/unit/CRKSPH/testInterpolation.py index 45844bc81..1cb0bed40 100644 --- a/tests/unit/CRKSPH/testInterpolation.py +++ b/tests/unit/CRKSPH/testInterpolation.py @@ -67,7 +67,7 @@ graphics = True, plotKernels = False, - outputFile = "None", + outputFile = None, ) assert testCase in ("linear", "quadratic", "step") @@ -465,7 +465,7 @@ def flattenFieldList(fl): p7.plot(xvals, WR, "g-", label="RK") p7.axes.legend() plt.title("Kernel") - if outputFile != "None": + if outputFile: f = open("Kernel_" + outputFile, "w") f.write(("#" + 3*' "%20s"' + "\n") % ("eta", "Wj", "WRj")) for xi, Wi, WRi in zip(xvals, W, WR): @@ -473,7 +473,7 @@ def flattenFieldList(fl): f.close() # We may want a gnu/pdv style text file. 
- if outputFile != "None" and testDim == "2d": + if outputFile and testDim == "2d": of = open(outputFile, "w") of.write(('#' + 7*' "%20s"' + '\n') % ("x", "interp answer", "grad answer", "interp SPH", "interp CRK", "grad SPH", "grad CRK")) for iNodeList, nodes in enumerate(db.nodeLists()): diff --git a/tests/unit/SPH/testLinearVelocityGradient.py b/tests/unit/SPH/testLinearVelocityGradient.py index 9acce429f..02b5b5e51 100644 --- a/tests/unit/SPH/testLinearVelocityGradient.py +++ b/tests/unit/SPH/testLinearVelocityGradient.py @@ -64,7 +64,6 @@ graphics = True, plotKernels = False, - outputFile = "None", plotSPH = True, ) diff --git a/tests/unit/Utilities/testFails.py b/tests/unit/Utilities/testFails.py new file mode 100644 index 000000000..e4c3fa4df --- /dev/null +++ b/tests/unit/Utilities/testFails.py @@ -0,0 +1,5 @@ +#ATS:~test(SELF, label="Failing test") + +import sys + +sys.exit(1) From ef2b1db79df270e44843c0c99ae6726b641bb910 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Fri, 25 Oct 2024 15:37:22 -0700 Subject: [PATCH 17/44] Fix ats filters passed from CMake --- scripts/CMakeLists.txt | 10 +++++----- scripts/spheral_ats.py.in | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index 31eaddb53..7ebf938f2 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -11,23 +11,23 @@ if (NOT ENABLE_CXXONLY) set(SPHERAL_ATS_BUILD_CONFIG_ARGS ) if (CMAKE_BUILD_TYPE STREQUAL "Debug") - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter='\"level<100\"'") + list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"level<100\"") endif() if (NOT ENABLE_MPI) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter='\"np<2\"'") + list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"np<2\"") endif() if (NOT SPHERAL_ENABLE_FSISPH) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter='\"not fsisph\"'") + list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"not fsisph\"") endif() if (NOT 
SPHERAL_ENABLE_GSPH) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter='\"not gsph\"'") + list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"not gsph\"") endif() if (NOT SPHERAL_ENABLE_SVPH) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter='\"not svph\"'") + list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"not svph\"") endif() string(REPLACE ";" " " SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING "${SPHERAL_ATS_BUILD_CONFIG_ARGS}") diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 6ea89d438..427574b81 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -12,7 +12,7 @@ sys.path.append(os.path.join(install_prefix, "scripts")) from spheralutils import sexe # Apply filters set during install -install_filters = "@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@" +install_filters = """@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@""" # Options for running CI # If the number of failed tests exceeds this value, ATS is not rerun From 1ca7d9d7653691679c959167da1da1f398e374a8 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 28 Oct 2024 07:21:06 -0700 Subject: [PATCH 18/44] More bug fixes for ats arguments --- scripts/spheral_ats.py.in | 37 +++++++++++++++++++------------------ 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 427574b81..f1ef06e0f 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -12,7 +12,7 @@ sys.path.append(os.path.join(install_prefix, "scripts")) from spheralutils import sexe # Apply filters set during install -install_filters = """@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@""" +install_filters = '@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@' # Options for running CI # If the number of failed tests exceeds this value, ATS is not rerun @@ -119,29 +119,30 @@ def main(): #--------------------------------------------------------------------------- # Setup machine info classes 
#--------------------------------------------------------------------------- - mac_args = [] ats_args = [install_filters] numNodes = options.numNodes timeLimit = options.timeLimit ciRun = False if options.perfTest else True launch_cmd = "" - if "rzgenie" in hostname or "ruby" in hostname: - numNodes = numNodes if numNodes else 2 - timeLimit = timeLimit if timeLimit else 120 - time_limit = 120 - mac_args = [f"--numNodes {numNodes}"] - launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " - if (ciRun): - launch_cmd += "-p pdebug " - elif "lassen" in hostname or "rzansel" in hostname: - numNodes = numNodes if numNodes else 1 - timeLimit = timeLimit if timeLimit else 60 - mac_args = ["--smpi_off", f"--numNodes {numNodes}"] - launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " - if (ciRun): - launch_cmd += "-q pdebug " - ats_args.extend(mac_args) + if hostname: + mac_args = [] + if "rzgenie" in hostname or "ruby" in hostname: + numNodes = numNodes if numNodes else 2 + timeLimit = timeLimit if timeLimit else 120 + time_limit = 120 + mac_args = [f"--numNodes {numNodes}"] + launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " + if (ciRun): + launch_cmd += "-p pdebug " + elif "lassen" in hostname or "rzansel" in hostname: + numNodes = numNodes if numNodes else 1 + timeLimit = timeLimit if timeLimit else 60 + mac_args = ["--smpi_off", f"--numNodes {numNodes}"] + launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " + if (ciRun): + launch_cmd += "-q pdebug " + ats_args.extend(mac_args) #--------------------------------------------------------------------------- # Launch ATS From a8f85c479a69cf28710a269907857118c2851679 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 28 Oct 2024 11:11:31 -0700 Subject: [PATCH 19/44] Make sure entire run command is in quotes to avoid bsub issue with inputs using less than symbol --- scripts/spheral_ats.py.in | 16 ++++++++++++---- 1 file changed, 12 
insertions(+), 4 deletions(-) diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index f1ef06e0f..4da47a3fb 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -12,7 +12,7 @@ sys.path.append(os.path.join(install_prefix, "scripts")) from spheralutils import sexe # Apply filters set during install -install_filters = '@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@' +install_filters = '''@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@''' # Options for running CI # If the number of failed tests exceeds this value, ATS is not rerun @@ -124,6 +124,7 @@ def main(): timeLimit = options.timeLimit ciRun = False if options.perfTest else True launch_cmd = "" + blueOS = False if hostname: mac_args = [] @@ -136,6 +137,7 @@ def main(): if (ciRun): launch_cmd += "-p pdebug " elif "lassen" in hostname or "rzansel" in hostname: + blueOS = True numNodes = numNodes if numNodes else 1 timeLimit = timeLimit if timeLimit else 60 mac_args = ["--smpi_off", f"--numNodes {numNodes}"] @@ -155,11 +157,17 @@ def main(): else: log_name_indx = unknown_options.index("--logs") + 1 log_name = unknown_options[log_name_indx] - ats_args.append("--glue='independent=True'") - ats_args.append("--continueFreq=15") + ats_args.append('--glue="independent=True"') + ats_args.append('--continueFreq=15') ats_args = " ".join(str(x) for x in ats_args) other_args = " ".join(str(x) for x in unknown_options) - run_command = f"{launch_cmd}{ats_exe} -e {spheral_exe} {ats_args} {other_args}" + cmd = f"{ats_exe} -e {spheral_exe} {ats_args} {other_args}" + if blueOS: + # Launches using Bsub have issues with '<' being in command + # so entire run statment must be in quotes + run_command = f"{launch_cmd} '{cmd}'" + else: + run_command = f"{launch_cmd}{cmd}" print(f"\nRunning: {run_command}\n") if (ciRun): run_and_report(run_command, log_name, 0) From 9f300b282b5fb7df7f96c2373d48718dbffd8add Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 28 Oct 2024 15:32:43 -0700 Subject: [PATCH 20/44] Another 
attempt to fix job launch ats stuff --- scripts/spheral_ats.py.in | 43 +++++++++++++++++++-------------------- 1 file changed, 21 insertions(+), 22 deletions(-) diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 4da47a3fb..6625b13f2 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -83,12 +83,12 @@ def run_and_report(run_command, ci_output, num_runs): # Main routine #--------------------------------------------------------------------------- def main(): + temp_uname = os.uname() + hostname = temp_uname[1] + sys_type = os.getenv("SYS_TYPE") # Use ATS to for some machine specific functions if "MACHINE_TYPE" not in os.environ: ats_utils.set_machine_type_based_on_sys_type() - hostname = None - if "HOSTNAME" in os.environ: - hostname = os.environ["HOSTNAME"] #--------------------------------------------------------------------------- # Setup argument parser #--------------------------------------------------------------------------- @@ -126,25 +126,24 @@ def main(): launch_cmd = "" blueOS = False - if hostname: - mac_args = [] - if "rzgenie" in hostname or "ruby" in hostname: - numNodes = numNodes if numNodes else 2 - timeLimit = timeLimit if timeLimit else 120 - time_limit = 120 - mac_args = [f"--numNodes {numNodes}"] - launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " - if (ciRun): - launch_cmd += "-p pdebug " - elif "lassen" in hostname or "rzansel" in hostname: - blueOS = True - numNodes = numNodes if numNodes else 1 - timeLimit = timeLimit if timeLimit else 60 - mac_args = ["--smpi_off", f"--numNodes {numNodes}"] - launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " - if (ciRun): - launch_cmd += "-q pdebug " - ats_args.extend(mac_args) + mac_args = [] + if hostname and ("rzgenie" in hostname or "ruby" in hostname): + numNodes = numNodes if numNodes else 2 + timeLimit = timeLimit if timeLimit else 120 + time_limit = 120 + mac_args = [f"--numNodes {numNodes}"] + launch_cmd = 
f"salloc --exclusive -N {numNodes} -t {timeLimit} " + if (ciRun): + launch_cmd += "-p pdebug " + elif 'blueos_3_ppc64le_ib_p9' in sys_type: + blueOS = True + numNodes = numNodes if numNodes else 1 + timeLimit = timeLimit if timeLimit else 60 + mac_args = ["--smpi_off", f"--numNodes {numNodes}"] + launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " + if (ciRun): + launch_cmd += "-q pdebug " + ats_args.extend(mac_args) #--------------------------------------------------------------------------- # Launch ATS From 02b432fbe73294659a63971860c49ff72d85a0be Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Tue, 29 Oct 2024 09:20:51 -0700 Subject: [PATCH 21/44] Fix issue for running ATS on non-LC machines --- scripts/gitlab/performance_analysis.py.in | 3 ++- scripts/spheral_ats.py.in | 4 ++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/gitlab/performance_analysis.py.in b/scripts/gitlab/performance_analysis.py.in index 97739b3ab..2b2130d0a 100644 --- a/scripts/gitlab/performance_analysis.py.in +++ b/scripts/gitlab/performance_analysis.py.in @@ -23,10 +23,11 @@ def main(): if (os.path.isdir(args.atsOutput)): atsFile = os.path.join(args.atsOutput, "atsr.py") if (not os.path.exists(atsFile)): - raise Exception(f"File {atsFile} does not exist") + raise Exception("ATS file not found") # Run atsr.py and put values into globals exec(compile(open(atsFile).read(), atsFile, 'exec'), globals()) state = globals()["state"] + if __name__=="__main__": main() diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 6625b13f2..6daf8dee2 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -29,7 +29,7 @@ spheral_exe = os.path.join(install_prefix, "spheral") def report_results(output_dir): ats_py = os.path.join(output_dir, "atsr.py") if (not os.path.exists(ats_py)): - raise Exception(f"{ats_py} does not exists") + raise Exception("ats.py does not exists. 
Tests likely did not run.") exec(compile(open(ats_py).read(), ats_py, 'exec'), globals()) state = globals()["state"] failed_tests = [t for t in state['testlist'] if t['status'] in [FAILED,TIMEDOUT] ] @@ -135,7 +135,7 @@ def main(): launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " if (ciRun): launch_cmd += "-p pdebug " - elif 'blueos_3_ppc64le_ib_p9' in sys_type: + elif sys_type and 'blueos_3_ppc64le_ib_p9' in sys_type: blueOS = True numNodes = numNodes if numNodes else 1 timeLimit = timeLimit if timeLimit else 60 From d881ac1a22aa6df77a976b946a8b179b414bdf09 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Tue, 29 Oct 2024 17:13:08 -0700 Subject: [PATCH 22/44] Added mpi for docker ats tests, improved machine config tests for spheral ats --- Dockerfile | 2 +- scripts/gitlab/performance_analysis.py.in | 41 ++++++++++++++- scripts/spheral_ats.py.in | 39 +++++++------- tests/performance.py.in | 64 +++-------------------- 4 files changed, 69 insertions(+), 77 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9b8dddef5..da9e3b1c4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -75,5 +75,5 @@ RUN make install # Run ATS testing suite. 
WORKDIR ../install ENV MPLBACKEND=agg -RUN ./spheral-ats --level 99 tests/integration.ats +RUN ./spheral-ats --level 99 --mpiexec /usr/bin/mpirun --npMax $JCXX tests/integration.ats # ----------------------------------------------------------------------------- diff --git a/scripts/gitlab/performance_analysis.py.in b/scripts/gitlab/performance_analysis.py.in index 2b2130d0a..6af7939b8 100644 --- a/scripts/gitlab/performance_analysis.py.in +++ b/scripts/gitlab/performance_analysis.py.in @@ -1,6 +1,6 @@ #!/user/bin/env python3 -import os, sys +import os, sys, shutil, glob import argparse # Location of benchmark data @@ -27,7 +27,46 @@ def main(): # Run atsr.py and put values into globals exec(compile(open(atsFile).read(), atsFile, 'exec'), globals()) state = globals()["state"] + tests = state["testlist"] + for test in tests: + # Retrieve the Caliper file from run + run_dir = test["directory"] + options = test["options"] + cali_file = options["caliper_filename"] + cfile = os.path.join(run_dir, cali_file) + install_config = options["install_config"] + # Grab list of regions and timers + ref_regions = options["regions"] + ref_timers = options["timers"] + # Read Caliper file + r = cr.CaliperReader() + r.read(cfile) + # Get adiak metadata + gls = r.globals + test_name = gls["test_name"] + + # Extract relevant regions and timers + times = {} + for rec in records: + if ("region" in rec): + fname = rec["region"] + if (type(fname) is list): + fname = fname[-1] + if (fname in ref_regions): + if (fname in times): + for t in ref_timers: + times[fname][t] += float(rec[t]) + else: + new_dict = {} + for t in ref_timers: + new_dict.update({t: float(rec[t])}) + times.update({fname: new_dict}) + # Get historical timing data + cali_ref_dir = os.path.join(benchmark_dir, install_config, test_name) + if (not os.path.exists(cali_ref_dir)): + os.mkdir(cali_ref_dir) + shutils.copyfile(cfile, os.path.join(cali_ref_dir, cali_file)) if __name__=="__main__": main() diff --git 
a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py.in index 6daf8dee2..7f5617dc7 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py.in @@ -24,6 +24,9 @@ test_log_name = "test-logs" ats_exe = os.path.join(install_prefix, ".venv/bin/ats") spheral_exe = os.path.join(install_prefix, "spheral") +toss_machine_names = ["rzgenie", "rzwhippet", "rzhound", "ruby"] +blueos_machine_names = ["rzansel", "lassen"] + #------------------------------------------------------------------------------ # Run ats.py to check results and return the number of failed tests def report_results(output_dir): @@ -126,24 +129,24 @@ def main(): launch_cmd = "" blueOS = False - mac_args = [] - if hostname and ("rzgenie" in hostname or "ruby" in hostname): - numNodes = numNodes if numNodes else 2 - timeLimit = timeLimit if timeLimit else 120 - time_limit = 120 - mac_args = [f"--numNodes {numNodes}"] - launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " - if (ciRun): - launch_cmd += "-p pdebug " - elif sys_type and 'blueos_3_ppc64le_ib_p9' in sys_type: - blueOS = True - numNodes = numNodes if numNodes else 1 - timeLimit = timeLimit if timeLimit else 60 - mac_args = ["--smpi_off", f"--numNodes {numNodes}"] - launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " - if (ciRun): - launch_cmd += "-q pdebug " - ats_args.extend(mac_args) + if hostname: + mac_args = [] + if any(x in hostname for x in toss_machine_names): + numNodes = numNodes if numNodes else 2 + timeLimit = timeLimit if timeLimit else 120 + mac_args = [f"--numNodes {numNodes}"] + launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " + if (ciRun): + launch_cmd += "-p pdebug " + elif any(x in hostname for x in blueos_machine_names): + blueOS = True + numNodes = numNodes if numNodes else 1 + timeLimit = timeLimit if timeLimit else 60 + mac_args = ["--smpi_off", f"--numNodes {numNodes}"] + launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " + if 
(ciRun): + launch_cmd += "-q pdebug " + ats_args.extend(mac_args) #--------------------------------------------------------------------------- # Launch ATS diff --git a/tests/performance.py.in b/tests/performance.py.in index d00a10ea5..3114fb0d1 100644 --- a/tests/performance.py.in +++ b/tests/performance.py.in @@ -11,63 +11,11 @@ spheral_sys_arch = "@SPHERAL_SYS_ARCH@" # Current install configuration from Spack spheral_install_config = "@SPHERAL_CONFIGURATION@" -# Function called on exit to do timing comparisons -def compare_times(manager): - for test in manager.testlist: - run_dir = test.directory - cali_file = test.options["caliper_filename"] - cfile = os.path.join(run_dir, test.options["caliper_filename"]) - ref_regions = test.options["regions"] - ref_timers = test.options["timers"] - r = cr.CaliperReader() - r.read(cfile) - # Get the Caliper timing records - records = r.records - # Get the Caliper metadata, including Adiak data - gls = r.globals - # Filter out the commandLine inputs - metadata = {} - for key, val in gls.items(): - if (r.attribute(key).get('adiak.subcategory') != "spheral_input"): - metadata.update({key: val}) - # WIP: Compare timers against historical timers - if (ci_run): - ref_caliper_dir = test.options["ref_cali_dir"] - if (not os.path.exists(ref_caliper_dir)): - os.makedirs(ref_caliper_dir) - new_cali_data = os.path.join(ref_caliper_dir, cfile) - shutil.copyfile(cfile, new_cali_data) - - # Get the number of nodes (ie hardware nodes) - num_comp_nodes = eval(gls["numhosts"]) - # Get the number of MPI ranks - num_ranks = eval(gls["jobsize"]) - # Get the number of SPH nodes - total_internal_nodes = eval(gls["total_internal_nodes"]) - nodes_per_rank = int(total_internal_nodes / num_ranks) - print(f"SPH nodes per rank {nodes_per_rank}") - walltime = eval(metadata["walltime"]) - print(f"Walltime {walltime}") - # Extract current times - times = {} - # Iterate over list of records - for rec in records: - if ("region" in rec): - fname = 
rec["region"] - if (type(fname) is list): - fname = fname[-1] - if (fname in ref_regions): - if (fname in times): - for t in ref_timers: - times[fname][t] += float(rec[t]) - else: - new_dict = {} - for t in ref_timers: - new_dict.update({t: float(rec[t])}) - times.update({fname: new_dict}) - glue(keep=True) +def add_timer_cmds(cali_name, test_name): + return f"--caliperFilename {cali_name} --adiakData 'test_name: {test_name}, install_config: {spheral_install_config}'" + if ("power" in spheral_sys_arch): num_nodes = 1 num_cores = 40 @@ -104,7 +52,8 @@ ranks = [1, 2, 4] for i, n in enumerate(ranks): test_name = f"{test_name_base}_{i}" caliper_filename = f"{test_name}_{int(time.time())}.cali" - inps = f"{gen_noh_inps} --caliperFilename {caliper_filename} --nRadial {nRadial} --steps 10 --adiakData 'test_name: {test_name}'" + timer_cmds = add_timer_cmds(caliper_filename, test_name) + inps = f"{gen_noh_inps} --nRadial {nRadial} --steps 10 {timer_cmds}" ncores = int(num_nodes*num_cores/n) t = test(script=test_path, clas=inps, label=f"{test_name}", np=ncores, @@ -128,7 +77,8 @@ for i, n in enumerate(npcore): total_sph_nodes = n*num_cores npd = int(np.cbrt(total_sph_nodes)) node_inps = f"--nx {npd} --ny {npd} --nz {npd}" - inps = f"{gen_noh_inps} {node_inps} --caliperFilename {caliper_filename} --steps 3 --adiakData 'test_name: {test_name}'" + timer_cmds = add_timer_cmds(caliper_filename, test_name) + inps = f"{gen_noh_inps} {node_inps} --steps 3 {timer_cmds}" # WIP: Path to benchmark timing data ncores = int(num_cores) t = test(script=test_path, clas=inps, label=f"{test_name}", From 10a41213b92a25726b4828e013d6893817d1a4e1 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 31 Oct 2024 11:52:49 -0700 Subject: [PATCH 23/44] Added SpheralConfigs.py.in to remove files configured by CMake, brought bash executable back for spheral_ats --- Dockerfile | 8 +++- cmake/SetupSpheral.cmake | 8 ---- docs/developer/dev/docker_dev_env.rst | 18 +++---- scripts/CMakeLists.txt | 39 
+++------------ scripts/atstest.in | 3 ++ ...analysis.py.in => performance_analysis.py} | 6 ++- scripts/spheral-setup-venv.in | 8 ++-- scripts/{spheral_ats.py.in => spheral_ats.py} | 48 ++++++++++++------- src/SimulationControl/CMakeLists.txt | 17 +++++++ src/SimulationControl/SpheralConfigs.py.in | 25 ++++++++++ tests/{performance.py.in => performance.py} | 10 ++-- tests/unit/CMakeLists.txt | 5 -- .../{testTimers.py.in => testTimers.py} | 7 +-- 13 files changed, 118 insertions(+), 84 deletions(-) create mode 100644 scripts/atstest.in rename scripts/gitlab/{performance_analysis.py.in => performance_analysis.py} (96%) rename scripts/{spheral_ats.py.in => spheral_ats.py} (85%) create mode 100644 src/SimulationControl/SpheralConfigs.py.in rename tests/{performance.py.in => performance.py} (89%) rename tests/unit/Utilities/{testTimers.py.in => testTimers.py} (94%) diff --git a/Dockerfile b/Dockerfile index da9e3b1c4..32db7e2ba 100644 --- a/Dockerfile +++ b/Dockerfile @@ -75,5 +75,11 @@ RUN make install # Run ATS testing suite. 
WORKDIR ../install ENV MPLBACKEND=agg -RUN ./spheral-ats --level 99 --mpiexec /usr/bin/mpirun --npMax $JCXX tests/integration.ats +# ATS currently does not allow us to run in parallel for regular linux machines +# If it did, we would need some of the following commands +#RUN export OMP_NUM_THREADS=1 +#RUN export MACHINE_TYPE="winParallel" +#RUN ./spheral-ats --level 99 --mpiexe mpiexec --npMax $JCXX tests/integration.ats +# Instead, we will just run it normally +RUN ./spheral-ats --level 99 test/integration.ats # ----------------------------------------------------------------------------- diff --git a/cmake/SetupSpheral.cmake b/cmake/SetupSpheral.cmake index 30c88c387..3738456e6 100644 --- a/cmake/SetupSpheral.cmake +++ b/cmake/SetupSpheral.cmake @@ -153,14 +153,6 @@ endif() # Build C++ tests and install tests to install directory #------------------------------------------------------------------------------- if (ENABLE_TESTS) - configure_file( - "${SPHERAL_ROOT_DIR}/tests/performance.py.in" - "${CMAKE_BINARY_DIR}/tests/performance.py" - ) - install(FILES - "${CMAKE_BINARY_DIR}/tests/performance.py" - DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" - ) install(DIRECTORY ${SPHERAL_ROOT_DIR}/tests/ USE_SOURCE_PERMISSIONS DESTINATION "${SPHERAL_TEST_INSTALL_PREFIX}" diff --git a/docs/developer/dev/docker_dev_env.rst b/docs/developer/dev/docker_dev_env.rst index 3618b4125..625732622 100644 --- a/docs/developer/dev/docker_dev_env.rst +++ b/docs/developer/dev/docker_dev_env.rst @@ -10,9 +10,9 @@ on local machines. Creating a Dev Environment =========================== -We will use ``docker dev create`` with our spheral docker image and a -local repository. This will allow us to skip setting up a linux system with -external packages, gives us pre-built TPLs and allows us to edit a cloned +We will use ``docker dev create`` with our Spheral docker image and a +local repository. 
This will allow us to skip setting up a linux system with +external packages, gives us pre-built TPLs and allows us to edit a cloned repository from our local machines IDE/text editor.bash:: > rm /compose-dev.yaml @@ -46,18 +46,18 @@ repository from our local machines IDE/text editor.bash:: Connecting to a Dev Container ============================= -Once the continaer has ben started you can connect directly through the terminal +Once the container has been started, you can connect directly through the terminal with the **Container** name (**NOT** the **Dev Environment** name).:: > docker exec -it spheral-recursing_darwin-app-1 /bin/bash root@671dab5d0b00:/home/spheral/workspace/build_docker-gcc/install# This drops you into the install location of the ``spheral@develop`` build from -github, this is a fully installed version of the latest ``develop`` spheral. +github, this is a fully installed version of the latest ``develop`` Spheral. -.. tip:: +.. tip:: VSCode & Docker Desktop: - * Open **Docker Desktop** and navigate to the **Dev Environment** tab. + * Open **Docker Desktop** and navigate to the **Dev Environment** tab. * Find the container name and select **OPEN IN VSCODE**. @@ -68,8 +68,8 @@ Development Work Your local Spheral repo is mounted from your local filesystem. You can develop directly from your IDE or text editor of choice. Then you can compile and run from within the container itself. -- The local Spheral repository will be mounted in the container at ``/com.docker.devenvironments.code/``. +- The local Spheral repository will be mounted in the container at ``/com.docker.devenvironments.code/``. -- There already exists a full build and install of Spheral at ``develop`` in ``/home/spheral/workspace/build_docker-gcc/install``. +- There already exists a full build and install of Spheral at ``develop`` in ``/home/spheral/workspace/build_docker-gcc/install``. - An updated host config file can be found at ``/home/spheral/wokspace/docker-gcc.cmake``. 
diff --git a/scripts/CMakeLists.txt b/scripts/CMakeLists.txt index 7ebf938f2..19001e771 100644 --- a/scripts/CMakeLists.txt +++ b/scripts/CMakeLists.txt @@ -8,30 +8,6 @@ if (NOT ENABLE_CXXONLY) # our virtual env in spheral-setup-venv.sh string(REGEX REPLACE "lib\/python3.9\/site-packages\/?[A-Za-z]*:" ";" VIRTUALENV_PYTHONPATH_COPY "${SPACK_PYTHONPATH}:") - set(SPHERAL_ATS_BUILD_CONFIG_ARGS ) - - if (CMAKE_BUILD_TYPE STREQUAL "Debug") - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"level<100\"") - endif() - - if (NOT ENABLE_MPI) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"np<2\"") - endif() - - if (NOT SPHERAL_ENABLE_FSISPH) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"not fsisph\"") - endif() - - if (NOT SPHERAL_ENABLE_GSPH) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"not gsph\"") - endif() - - if (NOT SPHERAL_ENABLE_SVPH) - list(APPEND SPHERAL_ATS_BUILD_CONFIG_ARGS "--filter=\"not svph\"") - endif() - - string(REPLACE ";" " " SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING "${SPHERAL_ATS_BUILD_CONFIG_ARGS}") - configure_file( "${CMAKE_CURRENT_SOURCE_DIR}/spheral-setup-venv.in" "${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh" @@ -43,20 +19,16 @@ if (NOT ENABLE_CXXONLY) ) configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/spheral_ats.py.in" - "${CMAKE_CURRENT_BINARY_DIR}/spheral_ats.py" - ) - - configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/gitlab/performance_analysis.py.in" - "${CMAKE_CURRENT_BINARY_DIR}/performance_analysis.py" + "${CMAKE_CURRENT_SOURCE_DIR}/atstest.in" + "${CMAKE_CURRENT_BINARY_DIR}/atstest.sh" ) install(FILES "${CMAKE_CURRENT_BINARY_DIR}/spheral-setup-venv.sh" "${CMAKE_CURRENT_BINARY_DIR}/spheral-env.sh" - "${CMAKE_CURRENT_BINARY_DIR}/spheral_ats.py" - "${CMAKE_CURRENT_BINARY_DIR}/performance_analysis.py" + "${CMAKE_CURRENT_BINARY_DIR}/atstest.sh" + "${CMAKE_CURRENT_SOURCE_DIR}/spheral_ats.py" + "${CMAKE_CURRENT_SOURCE_DIR}/gitlab/performance_analysis.py" "${CMAKE_CURRENT_SOURCE_DIR}/spheralutils.py" DESTINATION 
"${CMAKE_INSTALL_PREFIX}/scripts" ) @@ -66,6 +38,7 @@ if (NOT ENABLE_CXXONLY) 'Spheral>')" ) + # Copy over all of the python TPL files, with a few exceptions foreach(_venv_dir ${VIRTUALENV_PYTHONPATH_COPY}) if(NOT ${_venv_dir} MATCHES "sphinx") install(DIRECTORY ${_venv_dir} diff --git a/scripts/atstest.in b/scripts/atstest.in new file mode 100644 index 000000000..04aa78013 --- /dev/null +++ b/scripts/atstest.in @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +@CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/spheral_ats.py "$@" \ No newline at end of file diff --git a/scripts/gitlab/performance_analysis.py.in b/scripts/gitlab/performance_analysis.py similarity index 96% rename from scripts/gitlab/performance_analysis.py.in rename to scripts/gitlab/performance_analysis.py index 6af7939b8..8f70cadd5 100644 --- a/scripts/gitlab/performance_analysis.py.in +++ b/scripts/gitlab/performance_analysis.py @@ -3,11 +3,13 @@ import os, sys, shutil, glob import argparse +import SpheralConfigs + # Location of benchmark data benchmark_dir = "/usr/gapps/Spheral/benchmarks" -caliper_loc = "@CONFIG_CALIPER_DIR@" -sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) +caliper_loc = SpheralConfigs.caliper_module_path() +sys.path.append(caliper_loc) import caliperreader as cr def main(): diff --git a/scripts/spheral-setup-venv.in b/scripts/spheral-setup-venv.in index af82efa4b..2991641e4 100644 --- a/scripts/spheral-setup-venv.in +++ b/scripts/spheral-setup-venv.in @@ -5,7 +5,7 @@ cp @SPHERAL_SITE_PACKAGES_PATH@/Spheral.pth .venv/@SPHERAL_SITE_PACKAGES_PATH@/ mkdir -p .venv/@SPHERAL_SITE_PACKAGES_PATH@/Spheral cd @CMAKE_INSTALL_PREFIX@/.venv/@SPHERAL_SITE_PACKAGES_PATH@/Spheral cp --symbolic-link @CMAKE_INSTALL_PREFIX@/@SPHERAL_SITE_PACKAGES_PATH@/Spheral/* . > /dev/null 2>&1 -cd - > /dev/null +cd - > /dev/null # We need to reconfigure ATS to use our virtual env python otherwise ats will not be able to launch properly. 
echo "Reconfigure ATS executing python to virtual env python..." @@ -15,10 +15,10 @@ sed -i 's|XXXXXX|\x27\x27\x27exec\x27 @CMAKE_INSTALL_PREFIX@/.venv/bin/python "$ echo "Creating spheral symlink to spheral-env script ..." cd @CMAKE_INSTALL_PREFIX@ chmod u+x scripts/spheral-env.sh -chmod u+x scripts/spheral_ats.py +chmod u+x scripts/atstest.sh cp --symbolic-link scripts/spheral-env.sh spheral &> /dev/null -cp --symbolic-link scripts/spheral_ats.py spheral-ats &> /dev/null -cd - > /dev/null +cp --symbolic-link scripts/atstest.sh spheral-ats &> /dev/null +cd - > /dev/null echo "Byte-compiling packages in install path ..." @CMAKE_INSTALL_PREFIX@/spheral -m compileall -q @CMAKE_INSTALL_PREFIX@/.venv/@SPHERAL_SITE_PACKAGES_PATH@ diff --git a/scripts/spheral_ats.py.in b/scripts/spheral_ats.py similarity index 85% rename from scripts/spheral_ats.py.in rename to scripts/spheral_ats.py index 7f5617dc7..84daf6a31 100644 --- a/scripts/spheral_ats.py.in +++ b/scripts/spheral_ats.py @@ -1,31 +1,29 @@ -#!@CMAKE_INSTALL_PREFIX@/spheral +#!/usr/bin/env python3 import os, time, sys import argparse import ats.util.generic_utils as ats_utils +import SpheralConfigs +import mpi # This is a wrapper for running Spheral through ATS -# Find spheralutils.py -install_prefix = "@CMAKE_INSTALL_PREFIX@" -sys.path.append(os.path.join(install_prefix, "scripts")) -from spheralutils import sexe - -# Apply filters set during install -install_filters = '''@SPHERAL_ATS_BUILD_CONFIG_ARGS_STRING@''' - # Options for running CI # If the number of failed tests exceeds this value, ATS is not rerun max_test_failures = 10 # Number of times to rerun the ATS tests max_reruns = 1 -test_log_name = "test-logs" +# Use current path to find spheralutils module +cur_dir = os.path.dirname(__file__) +# Set current directory to install prefix +if (os.path.islink(__file__)): + cur_dir = os.path.join(cur_dir, os.readlink(__file__)) +install_prefix = os.path.join(cur_dir, "..") ats_exe = os.path.join(install_prefix, 
".venv/bin/ats") spheral_exe = os.path.join(install_prefix, "spheral") - -toss_machine_names = ["rzgenie", "rzwhippet", "rzhound", "ruby"] -blueos_machine_names = ["rzansel", "lassen"] +sys.path.append(cur_dir) +from spheralutils import sexe #------------------------------------------------------------------------------ # Run ats.py to check results and return the number of failed tests @@ -46,7 +44,6 @@ def report_results(output_dir): return 0 #------------------------------------------------------------------------------ - # Run the tests and check if any failed def run_and_report(run_command, ci_output, num_runs): if (num_runs > max_reruns): @@ -82,22 +79,41 @@ def run_and_report(run_command, ci_output, num_runs): print("WARNING: Test failure, rerunning ATS") run_and_report(rerun_command, ci_output, num_runs + 1) +#------------------------------------------------------------------------------ +# Add any build specific ATS arguments +def install_ats_args(): + install_args = [] + if (SpheralConfigs.build_type() == "Debug"): + install_args.append('--level 99') + if (mpi.is_fake_mpi()): + install_args.append('--filter="np<2"') + comp_configs = SpheralConfigs.component_configs() + test_comps = ["FSISPH", "GSPH", "SVPH"] + for ts in test_comps: + if ts not in comp_configs: + install_args.append(f'--filter="not {ts.lower()}"') + return install_args + #--------------------------------------------------------------------------- # Main routine #--------------------------------------------------------------------------- def main(): + test_log_name = "test-logs" + toss_machine_names = ["rzgenie", "rzwhippet", "rzhound", "ruby"] + blueos_machine_names = ["rzansel", "lassen"] temp_uname = os.uname() hostname = temp_uname[1] sys_type = os.getenv("SYS_TYPE") # Use ATS to for some machine specific functions if "MACHINE_TYPE" not in os.environ: ats_utils.set_machine_type_based_on_sys_type() + #--------------------------------------------------------------------------- # Setup 
argument parser #--------------------------------------------------------------------------- parser = argparse.ArgumentParser(allow_abbrev=False, usage=""" - ./spheral-ats --numNodes 2 tests/integration.ats --filter="level<100" + ./spheral spheral_ats.py --numNodes 2 tests/integration.ats --filter="level<100" """, description=""" Launches and runs Spheral using the ATS system. @@ -122,7 +138,7 @@ def main(): #--------------------------------------------------------------------------- # Setup machine info classes #--------------------------------------------------------------------------- - ats_args = [install_filters] + ats_args = install_ats_args() numNodes = options.numNodes timeLimit = options.timeLimit ciRun = False if options.perfTest else True diff --git a/src/SimulationControl/CMakeLists.txt b/src/SimulationControl/CMakeLists.txt index 32f7ca438..578733220 100644 --- a/src/SimulationControl/CMakeLists.txt +++ b/src/SimulationControl/CMakeLists.txt @@ -25,6 +25,22 @@ configure_file( ${CMAKE_CURRENT_SOURCE_DIR}/Spheral_banner.py ${CMAKE_CURRENT_BINARY_DIR}/Spheral_banner.py) +# Configure SpheralConfigs.py.in +set(SPHERAL_COMP_CONFIG) +if (SPHERAL_ENABLE_FSISPH) + list(APPEND SPHERAL_COMP_CONFIG "\"FSISPH\"") +endif() +if (SPHERAL_ENABLE_GSPH) + list(APPEND SPHERAL_COMP_CONFIG "\"GSPH\"") +endif() +if (SPHERAL_ENABLE_SVPH) + list(APPEND SPHERAL_COMP_CONFIG "\"SVPH\"") +endif() +string(REPLACE ";" ", " SPHERAL_COMP_CONFIG_STR "${SPHERAL_COMP_CONFIG}") +configure_file( + ${CMAKE_CURRENT_SOURCE_DIR}/SpheralConfigs.py.in + ${CMAKE_CURRENT_BINARY_DIR}/SpheralConfigs.py) + set(_dims 1) if(ENABLE_2D) list(APPEND _dims 2) @@ -80,4 +96,5 @@ spheral_install_python_files( CaptureStdout.py ${CMAKE_CURRENT_BINARY_DIR}/Spheral_banner.py ${CMAKE_CURRENT_BINARY_DIR}/spheralDimensions.py + ${CMAKE_CURRENT_BINARY_DIR}/SpheralConfigs.py ) diff --git a/src/SimulationControl/SpheralConfigs.py.in b/src/SimulationControl/SpheralConfigs.py.in new file mode 100644 index 
000000000..487fb9a8a --- /dev/null +++ b/src/SimulationControl/SpheralConfigs.py.in @@ -0,0 +1,25 @@ + +''' +This module allows access to the Spheral build configuration information +''' + +import sys, os + +def build_type(): + return "@CMAKE_BUILD_TYPE@" + +def sys_arch(): + return "@SPHERAL_SYS_ARCH@" + +def config(): + return "@SPHERAL_CONFIGURATION@" + +def component_configs(): + return [@SPHERAL_COMP_CONFIG_STR@] + +def caliper_module_path(): + caliper_loc = "@CONFIG_CALIPER_DIR@" + if (caliper_loc and os.path.exists(caliper_loc)): + return os.path.join(caliper_loc, "lib64/caliper") + else: + return None diff --git a/tests/performance.py.in b/tests/performance.py similarity index 89% rename from tests/performance.py.in rename to tests/performance.py index 3114fb0d1..0d4e48406 100644 --- a/tests/performance.py.in +++ b/tests/performance.py @@ -5,11 +5,15 @@ import sys, shutil, os, time import numpy as np +cur_dir = os.path.dirname(__file__) +spheral_path = os.path.join(cur_dir, "../lib/python3.9/site-packages/Spheral") +sys.path.append(spheral_path) +import SpheralConfigs # Current system architecture from Spack -spheral_sys_arch = "@SPHERAL_SYS_ARCH@" +spheral_sys_arch = SpheralConfigs.sys_arch() # Current install configuration from Spack -spheral_install_config = "@SPHERAL_CONFIGURATION@" +spheral_install_config = SpheralConfigs.config() glue(keep=True) @@ -24,7 +28,7 @@ def add_timer_cmds(cali_name, test_name): num_cores = 36 # NOH tests -test_dir = "@SPHERAL_TEST_INSTALL_PREFIX@/functional/Hydro/Noh" +test_dir = os.path.join(SpheralConfigs.test_install_path(), "functional/Hydro/Noh") # Select which timing regions to post-process regions = ["CheapRK2", diff --git a/tests/unit/CMakeLists.txt b/tests/unit/CMakeLists.txt index d0e8fce37..4f009ae8a 100644 --- a/tests/unit/CMakeLists.txt +++ b/tests/unit/CMakeLists.txt @@ -1,6 +1 @@ add_subdirectory(CXXTests) - -configure_file( - "${CMAKE_CURRENT_SOURCE_DIR}/Utilities/testTimers.py.in" - 
"${SPHERAL_TEST_INSTALL_PREFIX}/unit/Utilities/testTimers.py" - ) diff --git a/tests/unit/Utilities/testTimers.py.in b/tests/unit/Utilities/testTimers.py similarity index 94% rename from tests/unit/Utilities/testTimers.py.in rename to tests/unit/Utilities/testTimers.py index 1ab566620..32ff49920 100644 --- a/tests/unit/Utilities/testTimers.py.in +++ b/tests/unit/Utilities/testTimers.py @@ -10,6 +10,7 @@ from SpheralOptionParser import * from SpheralUtilities import * import mpi +import SpheralConfigs import sys, os, time @@ -50,10 +51,10 @@ adiak_fini() TimerMgr.fini() mpi.barrier() - caliper_loc = "@CONFIG_CALIPER_DIR@" - sys.path.append(os.path.join(caliper_loc, "lib64/caliper")) - if (not os.path.exists(caliper_file)): + caliper_loc = SpheralConfigs.caliper_module_path() + if (not caliper_loc): raise FileNotFoundError("Caliper file not found") + sys.path.append(caliper_loc) import caliperreader as cr r = cr.CaliperReader() r.read(caliper_file) From 0db33f4298011b0108642fe4b428995cc1cbda46 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 31 Oct 2024 14:52:21 -0700 Subject: [PATCH 24/44] Add line to docs about valgrind, fix typo in dockerfile --- Dockerfile | 4 +++- docs/developer/dev/diagnostic_tools.rst | 19 ++++++++++++++----- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/Dockerfile b/Dockerfile index 32db7e2ba..3c8e9bad0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -75,11 +75,13 @@ RUN make install # Run ATS testing suite. 
WORKDIR ../install ENV MPLBACKEND=agg + # ATS currently does not allow us to run in parallel for regular linux machines # If it did, we would need some of the following commands #RUN export OMP_NUM_THREADS=1 #RUN export MACHINE_TYPE="winParallel" #RUN ./spheral-ats --level 99 --mpiexe mpiexec --npMax $JCXX tests/integration.ats + # Instead, we will just run it normally -RUN ./spheral-ats --level 99 test/integration.ats +RUN ./spheral-ats --level 99 tests/integration.ats # ----------------------------------------------------------------------------- diff --git a/docs/developer/dev/diagnostic_tools.rst b/docs/developer/dev/diagnostic_tools.rst index a607db329..782bfc72f 100644 --- a/docs/developer/dev/diagnostic_tools.rst +++ b/docs/developer/dev/diagnostic_tools.rst @@ -1,15 +1,24 @@ -Code Performance Diagnostics -############################ +Code Debugging and Diagnostics +############################## -Spheral uses Caliper to preform code diagnostics, such as timing. To enable this functionality in the code, Spheral needs to be configured with ``ENABLE_TIMER=ON``. Otherwise, the timing regions are no-ops for improved preformance. +Valgrind +======== + +We advise using Valgrind to check memory leaks when doing development on Spheral. +When using Valgrind to check Spheral, be sure to use the provided suppression file :: - ./scripts/devtools/host-config-build.py -.cmake -DENABLE_TIMER=ON + valgrind --suppressions=./scripts/devtools/valgrind_python_suppression ./spheral Using Caliper ============= +Spheral uses Caliper to preform code diagnostics, such as timing. To enable this functionality in the code, Spheral needs to be configured with ``ENABLE_TIMER=ON``. Otherwise, the timing regions are no-ops for improved preformance. +:: + + ./scripts/devtools/host-config-build.py -.cmake -DENABLE_TIMER=ON + Caliper is configured and started through the ``cali::ConfigManager``. 
The ``cali::ConfigManager`` is wrapped in a ``TimerMgr`` singleton class, which has a python interface. @@ -99,7 +108,7 @@ Adiak metadata can be added inside python code using the following function call .. _manual_caliper: Starting Caliper Manually -======================== +========================= As mentioned above, the Caliper timing manager is normally configured and started in the ``commandLine()`` routine. However, Caliper can be directly configured and started through the python interface, if desired. This can be done by putting the following into the python file: :: From 4e1d42da15db17b0d8dec176bfacd50c632184c6 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 6 Nov 2024 13:59:06 -0800 Subject: [PATCH 25/44] Remove pdebug queue setting in spheral-ats --- scripts/spheral_ats.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/scripts/spheral_ats.py b/scripts/spheral_ats.py index 84daf6a31..33c128606 100644 --- a/scripts/spheral_ats.py +++ b/scripts/spheral_ats.py @@ -152,16 +152,12 @@ def main(): timeLimit = timeLimit if timeLimit else 120 mac_args = [f"--numNodes {numNodes}"] launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " - if (ciRun): - launch_cmd += "-p pdebug " elif any(x in hostname for x in blueos_machine_names): blueOS = True numNodes = numNodes if numNodes else 1 timeLimit = timeLimit if timeLimit else 60 mac_args = ["--smpi_off", f"--numNodes {numNodes}"] launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " - if (ciRun): - launch_cmd += "-q pdebug " ats_args.extend(mac_args) #--------------------------------------------------------------------------- From d4c9bf729625d8f543340a083bdc8b556e828451 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 6 Nov 2024 16:10:04 -0800 Subject: [PATCH 26/44] Adding -l options to spheral-ats to hopefully remove stty error messages --- scripts/atstest.in | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/atstest.in 
b/scripts/atstest.in index 04aa78013..ebc94a084 100644 --- a/scripts/atstest.in +++ b/scripts/atstest.in @@ -1,3 +1,4 @@ #!/usr/bin/env bash -@CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/spheral_ats.py "$@" \ No newline at end of file +# Running with exec bash -l prevents blueOS from printing stty error messages +exec bash -l @CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/spheral_ats.py "$@" From 514f6a6f88d0add7d044915a94365246b426a6d6 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 7 Nov 2024 13:52:13 -0800 Subject: [PATCH 27/44] Fix issue where allocations are scheduled inside of allocations, have spheral_ats check if we are inside an allocation --- .gitlab/machines.yml | 9 ++++----- .gitlab/scripts.yml | 20 +++++++++---------- scripts/atstest.in | 3 +-- scripts/lc/install-from-dev-pkg.sh | 8 +++----- scripts/spheral_ats.py | 31 ++++++++++++++++++++++-------- 5 files changed, 41 insertions(+), 30 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 56efee2f4..d7b68b90a 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -6,10 +6,8 @@ - ruby - shell variables: + SCHEDULER_PARAMETERS: "--res=ci --exclusive=user --deadline=now+1hour -N 2 -t 60" HOSTNAME: 'ruby' - PARTITION: pdebug - BUILD_ALLOC: srun -N 1 -c 36 -p pdebug -t 60 - CLEAN_ALLOC: srun -n 20 extends: [.on_toss_4_x86] .on_lassen: @@ -17,9 +15,10 @@ - lassen - shell variables: + SCHEDULER_PARAMETERS: "-nnodes 1 -W 60 -q pci -alloc_flags atsdisable" + LSB_JOB_STARTER: [ "ENVIRONMENT=BATCH" ] + ENVIRONMENT: "BATCH" HOSTNAME: 'lassen' - BUILD_ALLOC: lalloc 1 -W 60 - CLEAN_ALLOC: lalloc 1 lrun -n 20 LC_MODULES: "cuda/11.1.0" extends: [.on_blueos_3_ppc64] diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index c5ad9f9ab..f05759655 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -14,7 +14,7 @@ - cd $CI_BUILD_DIR - echo $SPEC - - $BUILD_ALLOC ./$SCRIPT_DIR/gitlab/build_and_install.py --spec="$SPEC" --tpls-only + - 
./$SCRIPT_DIR/gitlab/build_and_install.py --spec="$SPEC" --tpls-only artifacts: paths: - ci-dir.txt @@ -27,7 +27,7 @@ script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - $BUILD_ALLOC ./$SCRIPT_DIR/devtools/host-config-build.py --host-config gitlab.cmake --build $EXTRA_CMAKE_ARGS + - ./$SCRIPT_DIR/devtools/host-config-build.py --host-config gitlab.cmake --build $EXTRA_CMAKE_ARGS artifacts: paths: - ci-dir.txt @@ -55,7 +55,7 @@ - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - ./build_gitlab/install/spheral-ats ./build_gitlab/install/$ATS_FILE || exit_code=$? + - ./build_gitlab/install/spheral-ats --ciRun ./build_gitlab/install/$ATS_FILE || exit_code=$? - cp -r test-logs $CI_PROJECT_DIR - exit $exit_code artifacts: @@ -76,7 +76,7 @@ - ml load mpifileutils - cd $SPHERAL_BUILDS_DIR - - $CLEAN_ALLOC drm $CI_BUILD_DIR/.. + - drm $CI_BUILD_DIR/.. # ------------------------------------------------------------------------------ # Shared TPL scripts. 
@@ -87,7 +87,7 @@ variables: GIT_STRATEGY: none script: - - $BUILD_ALLOC ./$SCRIPT_DIR/devtools/tpl-manager.py --spec-list="$SCRIPT_DIR/devtools/spec-list.json" --spheral-spack-dir=$UPSTREAM_DIR + - ./$SCRIPT_DIR/devtools/tpl-manager.py --spec-list="$SCRIPT_DIR/devtools/spec-list.json" --spheral-spack-dir=$UPSTREAM_DIR .toss_update_permissions: stage: update_permissions @@ -95,7 +95,7 @@ GIT_STRATEGY: none script: - ml load mpifileutils - - srun -N 1 -p $PARTITION -n 20 -t 10 dchmod --mode go+rx $UPSTREAM_DIR + - dchmod --mode go+rx $UPSTREAM_DIR # ------------------------------------------------------------------------------ # Production Installation scripts @@ -121,7 +121,7 @@ - INSTALL_DIR=/usr/gapps/Spheral/$SYS_TYPE/spheral-$SPHERAL_REV_STR - DEV_PKG_NAME=$SYS_TYPE-spheral-dev-pkg-$SPHERAL_REV_STR - - env SPHERAL_REV_STR=$SPHERAL_REV_STR INSTALL_DIR=$INSTALL_DIR SPEC=$SPEC SPACK_PKG_NAME=$SPACK_PKG_NAME BUILD_ALLOC="$BUILD_ALLOC" SCRIPT_DIR=$SCRIPT_DIR + - env SPHERAL_REV_STR=$SPHERAL_REV_STR INSTALL_DIR=$INSTALL_DIR SPEC=$SPEC SPACK_PKG_NAME=$SPACK_PKG_NAME SCRIPT_DIR=$SCRIPT_DIR bash ./$SCRIPT_DIR/lc/generate-buildcache.sh - echo $INSTALL_DIR &> install-dir.txt @@ -145,7 +145,7 @@ - tar -xzf $DEV_PKG_NAME.tar.gz - cd $DEV_PKG_NAME - - env INSTALL_DIR=$INSTALL_DIR SPEC=$SPEC SPACK_PKG_NAME=$SPACK_PKG_NAME BUILD_ALLOC="$BUILD_ALLOC" SCRIPT_DIR=$SCRIPT_DIR + - env INSTALL_DIR=$INSTALL_DIR SPEC=$SPEC SPACK_PKG_NAME=$SPACK_PKG_NAME SCRIPT_DIR=$SCRIPT_DIR bash ./$SCRIPT_DIR/lc/install-from-dev-pkg.sh artifacts: @@ -164,7 +164,7 @@ - chmod go+r /usr/gapps/Spheral/modulefiles/Spheral/"$ALIAS".lua - ml load mpifileutils - - srun -N 1 -p $PARTITION -n 20 -t 10 dchmod --mode go+rx $INSTALL_DIR + - dchmod --mode go+rx $INSTALL_DIR - ln -sfn $INSTALL_DIR /usr/gapps/Spheral/$SYS_TYPE/$ALIAS @@ -187,7 +187,7 @@ - echo $DIR_LIST - ml load mpifileutils - - if [[ $DIR_LIST ]]; then $CLEAN_ALLOC drm $DIR_LIST; else echo "No directories to remove at this time."; fi + - if [[ 
$DIR_LIST ]]; then drm $DIR_LIST; else echo "No directories to remove at this time."; fi when: always .merge_pr_rule: diff --git a/scripts/atstest.in b/scripts/atstest.in index ebc94a084..e1262dccb 100644 --- a/scripts/atstest.in +++ b/scripts/atstest.in @@ -1,4 +1,3 @@ #!/usr/bin/env bash -# Running with exec bash -l prevents blueOS from printing stty error messages -exec bash -l @CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/spheral_ats.py "$@" +@CMAKE_INSTALL_PREFIX@/spheral @CMAKE_INSTALL_PREFIX@/scripts/spheral_ats.py "$@" diff --git a/scripts/lc/install-from-dev-pkg.sh b/scripts/lc/install-from-dev-pkg.sh index b4b6a1c6f..d109da586 100644 --- a/scripts/lc/install-from-dev-pkg.sh +++ b/scripts/lc/install-from-dev-pkg.sh @@ -2,7 +2,6 @@ trap 'echo "# $BASH_COMMAND"' DEBUG SPACK_PKG_NAME=${SPACK_PKG_NAME:-'spheral'} SPACK_URL=${SPACK_URL:-'https://github.com/spack/spack'} -BUILD_ALLOC=${BUILD_ALLOC} SCRIPT_DIR=${SCRIPT_DIR:-'scripts'} if [[ -z "${SPEC}" ]]; then @@ -20,7 +19,6 @@ echo $SPEC echo $SPACK_URL echo $INSTALL_DIR echo $SCRIPT_DIR -echo $BUILD_ALLOC rm -rf $INSTALL_DIR mkdir -p $INSTALL_DIR @@ -36,12 +34,12 @@ spack mirror add --unsigned spheral-mirror $PWD/resources/mirror spack mirror add --unsigned spheral-cache $PWD/resources spack buildcache update-index $PWD/resources/mirror -$BUILD_ALLOC spack install --fresh --deprecated --no-check-signature --only dependencies $SPACK_PKG_NAME@develop%$SPEC +spack install --fresh --deprecated --no-check-signature --only dependencies $SPACK_PKG_NAME@develop%$SPEC -$BUILD_ALLOC ./$SCRIPT_DIR/devtools/tpl-manager.py --spack-url $SPACK_URL --no-upstream --spheral-spack-dir $INSTALL_DIR/spheral-spack-tpls --spec $SPEC +./$SCRIPT_DIR/devtools/tpl-manager.py --spack-url $SPACK_URL --no-upstream --spheral-spack-dir $INSTALL_DIR/spheral-spack-tpls --spec $SPEC HOST_CONFIG_FILE=$(ls -t | grep -E "*\.cmake" | head -1) -$BUILD_ALLOC ./$SCRIPT_DIR/devtools/host-config-build.py --host-config $HOST_CONFIG_FILE 
-i $INSTALL_DIR --build --no-clean +./$SCRIPT_DIR/devtools/host-config-build.py --host-config $HOST_CONFIG_FILE -i $INSTALL_DIR --build --no-clean diff --git a/scripts/spheral_ats.py b/scripts/spheral_ats.py index 33c128606..e83509cb2 100644 --- a/scripts/spheral_ats.py +++ b/scripts/spheral_ats.py @@ -126,6 +126,8 @@ def main(): parser.add_argument("--timeLimit", type=int, default=None, help="Time limit for allocation.") + parser.add_argument("--ciRun", action="store_true", + help="Option to only be used by the CI") parser.add_argument("--perfTest", action="store_true", help="Turn on if doing a performance test.") parser.add_argument("--atsHelp", action="store_true", @@ -141,9 +143,12 @@ def main(): ats_args = install_ats_args() numNodes = options.numNodes timeLimit = options.timeLimit - ciRun = False if options.perfTest else True launch_cmd = "" blueOS = False + # These are environment variables to suggest we are in an allocation already + # NOTE: CI runs should already be in an allocation so the launch cmd is + # unused in those cases + inAllocVars = [] if hostname: mac_args = [] @@ -151,11 +156,15 @@ def main(): numNodes = numNodes if numNodes else 2 timeLimit = timeLimit if timeLimit else 120 mac_args = [f"--numNodes {numNodes}"] + inAllocVars = ["SLURM_JOB_NUM_NODES", "SLURM_NNODES"] launch_cmd = f"salloc --exclusive -N {numNodes} -t {timeLimit} " + if (options.ciRun): + launch_cmd += "-p pdebug " elif any(x in hostname for x in blueos_machine_names): blueOS = True numNodes = numNodes if numNodes else 1 timeLimit = timeLimit if timeLimit else 60 + inAllocVars = ["LSB_MAX_NUM_PROCESSORS"] mac_args = ["--smpi_off", f"--numNodes {numNodes}"] launch_cmd = f"bsub -nnodes {numNodes} -Is -XF -W {timeLimit} -core_isolation 2 " ats_args.extend(mac_args) @@ -164,7 +173,7 @@ def main(): # Launch ATS #--------------------------------------------------------------------------- # If doing a CI run, set some more options - if (ciRun): + if (not options.perfTest): if 
("--logs" not in unknown_options): ats_args.append(f"--logs {test_log_name}") log_name = test_log_name @@ -176,14 +185,20 @@ def main(): ats_args = " ".join(str(x) for x in ats_args) other_args = " ".join(str(x) for x in unknown_options) cmd = f"{ats_exe} -e {spheral_exe} {ats_args} {other_args}" - if blueOS: - # Launches using Bsub have issues with '<' being in command - # so entire run statment must be in quotes - run_command = f"{launch_cmd} '{cmd}'" + # Check if are already in an allocation + inAlloc = any(e in list(os.environ.keys()) for e in inAllocVars) + # If already in allocation, do not do a launch + if inAlloc: + run_command = cmd else: - run_command = f"{launch_cmd}{cmd}" + if blueOS: + # Launches using Bsub have issues with '<' being in command + # so entire run statment must be in quotes + run_command = f"{launch_cmd} '{cmd}'" + else: + run_command = f"{launch_cmd}{cmd}" print(f"\nRunning: {run_command}\n") - if (ciRun): + if (options.ciRun): run_and_report(run_command, log_name, 0) else: try: From 57f644b61bf3da58050b7bea1f51aa937c9e220b Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 7 Nov 2024 14:19:06 -0800 Subject: [PATCH 28/44] Fix bug in machine.yaml --- .gitlab/machines.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index d7b68b90a..1cbeba69a 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -6,7 +6,7 @@ - ruby - shell variables: - SCHEDULER_PARAMETERS: "--res=ci --exclusive=user --deadline=now+1hour -N 2 -t 60" + SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 60" HOSTNAME: 'ruby' extends: [.on_toss_4_x86] @@ -16,7 +16,7 @@ - shell variables: SCHEDULER_PARAMETERS: "-nnodes 1 -W 60 -q pci -alloc_flags atsdisable" - LSB_JOB_STARTER: [ "ENVIRONMENT=BATCH" ] + LSB_JOB_STARTER: "ENVIRONMENT=BATCH /usr/tcetmp/bin/bsub_job_starter %USRCMD" ENVIRONMENT: "BATCH" HOSTNAME: 'lassen' LC_MODULES: "cuda/11.1.0" From 8b7c046d8df3bc56c7d78ef7aa50b3328259927a 
Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 7 Nov 2024 14:31:10 -0800 Subject: [PATCH 29/44] Removed shell input for machines.yaml --- .gitlab/machines.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 1cbeba69a..9fc464513 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -4,7 +4,6 @@ .on_ruby: tags: - ruby - - shell variables: SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 60" HOSTNAME: 'ruby' @@ -13,7 +12,6 @@ .on_lassen: tags: - lassen - - shell variables: SCHEDULER_PARAMETERS: "-nnodes 1 -W 60 -q pci -alloc_flags atsdisable" LSB_JOB_STARTER: "ENVIRONMENT=BATCH /usr/tcetmp/bin/bsub_job_starter %USRCMD" From 42a12177e9333af5f6697f41a8c74245c3b6c8d7 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 7 Nov 2024 14:42:20 -0800 Subject: [PATCH 30/44] Explicitly set them to batch runners --- .gitlab/machines.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 9fc464513..6905c828b 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -4,6 +4,7 @@ .on_ruby: tags: - ruby + - batch variables: SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 60" HOSTNAME: 'ruby' @@ -12,6 +13,7 @@ .on_lassen: tags: - lassen + - batch variables: SCHEDULER_PARAMETERS: "-nnodes 1 -W 60 -q pci -alloc_flags atsdisable" LSB_JOB_STARTER: "ENVIRONMENT=BATCH /usr/tcetmp/bin/bsub_job_starter %USRCMD" From 06da230dd604310b470ed1fee1a72be5d7e4ff67 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Fri, 8 Nov 2024 14:33:46 -0800 Subject: [PATCH 31/44] Split up host-config CI command to not include build call, added no-clean to host-config call in CI to avoid completely rebuilding when test is rerun --- .gitlab/machines.yml | 4 +++- .gitlab/scripts.yml | 7 +++++-- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 6905c828b..28736433a 100644 --- a/.gitlab/machines.yml +++ 
b/.gitlab/machines.yml @@ -7,6 +7,7 @@ - batch variables: SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 60" + NPROC: 112 HOSTNAME: 'ruby' extends: [.on_toss_4_x86] @@ -15,8 +16,9 @@ - lassen - batch variables: - SCHEDULER_PARAMETERS: "-nnodes 1 -W 60 -q pci -alloc_flags atsdisable" + SCHEDULER_PARAMETERS: "-nnodes 1 -W 150 -q pci -alloc_flags atsdisable" LSB_JOB_STARTER: "ENVIRONMENT=BATCH /usr/tcetmp/bin/bsub_job_starter %USRCMD" + NPROC: 40 ENVIRONMENT: "BATCH" HOSTNAME: 'lassen' LC_MODULES: "cuda/11.1.0" diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index f05759655..385f50394 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -27,7 +27,10 @@ script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - ./$SCRIPT_DIR/devtools/host-config-build.py --host-config gitlab.cmake --build $EXTRA_CMAKE_ARGS + - ./$SCRIPT_DIR/devtools/host-config-build.py --no-clean --host-config gitlab.cmake $EXTRA_CMAKE_ARGS + - cd build_gitlab/build + - make -j $NPROC + - make -j $NPROC install artifacts: paths: - ci-dir.txt @@ -35,7 +38,7 @@ .build_and_test: extends: [.build] - after_script: + script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt From b7be579c6980be3b3810c23bde8d057f1213da96 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Fri, 8 Nov 2024 16:49:33 -0800 Subject: [PATCH 32/44] Changed build and test back to be after_script even though after_script cannot trigger a fail --- .gitlab/scripts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 385f50394..1e9680c0c 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -38,7 +38,7 @@ .build_and_test: extends: [.build] - script: + after_script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt From 609b9b39a2a9b60b4c8b05bc8e9bbf2c15246b59 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 11 Nov 2024 09:18:41 -0800 Subject: [PATCH 33/44] Switched back 
to using cmake build for simplicity --- .gitlab/scripts.yml | 5 +---- scripts/devtools/host-config-build.py | 6 +++++- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 1e9680c0c..5956f673a 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -27,10 +27,7 @@ script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - ./$SCRIPT_DIR/devtools/host-config-build.py --no-clean --host-config gitlab.cmake $EXTRA_CMAKE_ARGS - - cd build_gitlab/build - - make -j $NPROC - - make -j $NPROC install + - ./$SCRIPT_DIR/devtools/host-config-build.py --no-clean --build --nprocs $NPROCS --host-config gitlab.cmake $EXTRA_CMAKE_ARGS artifacts: paths: - ci-dir.txt diff --git a/scripts/devtools/host-config-build.py b/scripts/devtools/host-config-build.py index 0c90c45e8..557b23ae6 100755 --- a/scripts/devtools/host-config-build.py +++ b/scripts/devtools/host-config-build.py @@ -31,6 +31,9 @@ def parse_args(): parser.add_argument('--build', action='store_true', help='Run make -j install after configuring build dirs.') + parser.add_argument('--nprocs', default=48, + help="Set number of procs to use while building. This is not used if --build is not enabled.") + parser.add_argument('--lc-modules', type=str, default="", help='LC Modules to use during build, install and smoke test. This is not used if --build is not enabled.') @@ -106,8 +109,9 @@ def main(): print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") print("~~~~~ Building Spheral") print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~") + build_cmd = f"{ml_cmd} {cmake_cmd} --build . --target install -j {args.nprocs}" - sexe("{0} {1} --build . 
-j 48 --target install".format(ml_cmd, cmake_cmd), echo=True, ret_output=False) + sexe(build_cmd, echo=True, ret_output=False) if __name__ == "__main__": main() From a6776c84c10e088b3359deffb4c2efd87d40dc5c Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 11 Nov 2024 09:33:49 -0800 Subject: [PATCH 34/44] Fix typo in gitlab script --- .gitlab/scripts.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 5956f673a..9fbaa4103 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -27,7 +27,7 @@ script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt - - ./$SCRIPT_DIR/devtools/host-config-build.py --no-clean --build --nprocs $NPROCS --host-config gitlab.cmake $EXTRA_CMAKE_ARGS + - ./$SCRIPT_DIR/devtools/host-config-build.py --no-clean --build --nprocs $NPROC --host-config gitlab.cmake $EXTRA_CMAKE_ARGS artifacts: paths: - ci-dir.txt From 076f8d36fd0a2d3ac5e77af2bbc341dbea705791 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 11 Nov 2024 12:41:52 -0800 Subject: [PATCH 35/44] Added allocation time variable and timeout for gitlab ci --- .gitlab/machines.yml | 6 ++++-- .gitlab/scripts.yml | 1 + 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 28736433a..5d963b617 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -6,7 +6,8 @@ - ruby - batch variables: - SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 60" + ALLOC_TIME: 120 + SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t $ALLOC_TIME" NPROC: 112 HOSTNAME: 'ruby' extends: [.on_toss_4_x86] @@ -16,7 +17,8 @@ - lassen - batch variables: - SCHEDULER_PARAMETERS: "-nnodes 1 -W 150 -q pci -alloc_flags atsdisable" + ALLOC_TIME: 150 + SCHEDULER_PARAMETERS: "-nnodes 1 -W $ALLOC_TIME -q pci -alloc_flags atsdisable" LSB_JOB_STARTER: "ENVIRONMENT=BATCH /usr/tcetmp/bin/bsub_job_starter %USRCMD" NPROC: 40 ENVIRONMENT: "BATCH" diff --git 
a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 9fbaa4103..000fc0f41 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -24,6 +24,7 @@ stage: build_and_install variables: GIT_STRATEGY: none + timeout: $ALLOC_TIME minutes script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt From 0ddd853e0ce6b84bbff4e485453a3b8b48b21956 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 11 Nov 2024 12:48:15 -0800 Subject: [PATCH 36/44] Moved timeout to be part of the machine specs --- .gitlab/machines.yml | 8 ++++---- .gitlab/scripts.yml | 1 - 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 5d963b617..c2c18c0c5 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -6,10 +6,10 @@ - ruby - batch variables: - ALLOC_TIME: 120 - SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t $ALLOC_TIME" + SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 120" NPROC: 112 HOSTNAME: 'ruby' + timeout: 119 minutes extends: [.on_toss_4_x86] .on_lassen: @@ -17,13 +17,13 @@ - lassen - batch variables: - ALLOC_TIME: 150 - SCHEDULER_PARAMETERS: "-nnodes 1 -W $ALLOC_TIME -q pci -alloc_flags atsdisable" + SCHEDULER_PARAMETERS: "-nnodes 1 -W 150 -q pci -alloc_flags atsdisable" LSB_JOB_STARTER: "ENVIRONMENT=BATCH /usr/tcetmp/bin/bsub_job_starter %USRCMD" NPROC: 40 ENVIRONMENT: "BATCH" HOSTNAME: 'lassen' LC_MODULES: "cuda/11.1.0" + timeout: 149 minutes extends: [.on_blueos_3_ppc64] # ------------------------------------------------------------------------------ diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 000fc0f41..9fbaa4103 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -24,7 +24,6 @@ stage: build_and_install variables: GIT_STRATEGY: none - timeout: $ALLOC_TIME minutes script: - CI_BUILD_DIR=$(cat ci-dir.txt) - cd $CI_BUILD_DIR && cat job-name.txt From 6e6d0bb10682842eedefda75f0d8fc5c5b2fc68d Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 11 Nov 
2024 12:49:49 -0800 Subject: [PATCH 37/44] Make alloc and timeout times coincident --- .gitlab/machines.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index c2c18c0c5..891300aaa 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -9,7 +9,7 @@ SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 120" NPROC: 112 HOSTNAME: 'ruby' - timeout: 119 minutes + timeout: 120 minutes extends: [.on_toss_4_x86] .on_lassen: @@ -23,7 +23,7 @@ ENVIRONMENT: "BATCH" HOSTNAME: 'lassen' LC_MODULES: "cuda/11.1.0" - timeout: 149 minutes + timeout: 150 minutes extends: [.on_blueos_3_ppc64] # ------------------------------------------------------------------------------ From 17b12a6effab29b8549c1ac2364d3ea140f6f618 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Tue, 12 Nov 2024 09:29:47 -0800 Subject: [PATCH 38/44] Trying to fix slowdown of builds on rzgenie --- .gitlab/machines.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.gitlab/machines.yml b/.gitlab/machines.yml index 891300aaa..68c3d2d5b 100644 --- a/.gitlab/machines.yml +++ b/.gitlab/machines.yml @@ -6,7 +6,8 @@ - ruby - batch variables: - SCHEDULER_PARAMETERS: "--res=ci --exclusive=user -N 2 -t 120" + SCHEDULER_ACTION: allocate + SCHEDULER_PARAMETERS: "--res=ci --exclusive -N 2 -t 120" NPROC: 112 HOSTNAME: 'ruby' timeout: 120 minutes From 10d27a41e09e20cf476894e21e69d6daab4d890c Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 13 Nov 2024 14:15:41 -0800 Subject: [PATCH 39/44] Updated release notes --- RELEASE_NOTES.md | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 85809ca49..1ea1645d4 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -25,9 +25,10 @@ Notable changes include: * Physics::postStateUpdate now returns a bool indicating if boundary conditions should be enforced again. 
* Physics packages can now have Physics sub-packages, which can be run before or after the main package. The SpheralController now checks for these packages and adds them to the physics package list as needed. - * Physics packages can indicate if they require Voronoi cell information be available. If so, a new package which computes and + * Physics packages can indicate if they require Voronoi cell information be available. If so, a new package which computes and updates the Voronoi information is automatically added to the package list by the SpheralController (similar to how the Reproducing Kernel corrections are handled). + * Command line options are now consistent. Default values of a string "None" are no longer allowed and any input through the command line of "None" will become the python NoneType None. * Build changes / improvements: * Distributed source directory must always be built now. @@ -40,13 +41,16 @@ Notable changes include: * ENABLE\_DEV\_BUILD can now export targets properly. * Added a GCC flag to prevent building variable tracking symbols when building PYB11 modules. This is unnecessary, and on some platforms trying to build such symbols is very expensive and in some cases fails. + * Consolidates lcatstest.in and run\_ats.py into a single spheral\_ats.py script. + * SPHERAL\_TEST\_INSTALL\_PREFIX now includes the tests directory. + * Removed most configured files and added a SpheralConfigs.py file to use at runtime instead. * Bug Fixes / improvements: * Wrappers for MPI calls are simplified and improved. * Time step estimate due to velocity divergence in RZ space has been fixed. * Fixed tolerances for ANEOS equation of state temperature lookup * Clang C++ warnings have eliminated, so the Clang CI tests have been updated to treat warnings as errors. - * Fix for installing libraries when building individual package WITH ENABLE_DEV_BUILD=On. + * Fix for installing libraries when building individual package with ENABLE\_DEV\_BUILD=On. 
* Bugfix for RZ solid CRKSPH with compatible energy. * Parsing of None string now always becomes None python type. Tests have been updated accordingly. * IO for checkpoints and visuzalization can now be properly turned off through SpheralController input options. From 6ad28e4935c0c0d826be0bb6f506c9c937810672 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Wed, 13 Nov 2024 14:19:53 -0800 Subject: [PATCH 40/44] Removed lcats script --- scripts/lc/lcats | 240 ----------------------------------------------- 1 file changed, 240 deletions(-) delete mode 100755 scripts/lc/lcats diff --git a/scripts/lc/lcats b/scripts/lc/lcats deleted file mode 100755 index 70a7cb984..000000000 --- a/scripts/lc/lcats +++ /dev/null @@ -1,240 +0,0 @@ -#!/usr/bin/env python3 - -import os, time, sys -import time -import platform -import sys -import argparse, re -import subprocess -import copy - -d_debug= 0 - -SYS_TYPE = os.environ.get('SYS_TYPE','') -# This is better than platform.node() some of the time, because it differentiates between jade, jadeita, and jadedev. 
-LCSCHEDCLUSTER = os.environ.get('LCSCHEDCLUSTER','') - -#------------------------------------------------------------------------ - -class MachineInfo: - def __init__ (self, **options): - self.name = '' - self.allocTime = 120 - self.timeCmd = '--time' - self.timeLimit = 120 - self.machineType = '' - self.numNodes = 4 - self.procsPerNode = None - self.allocCmd = None - self.nodeCmd = '' - self.gpusPerNode = 0 - self.group = '' - self.groupCmd = '' - self.partition = 'pbatch' - self.partitionCmd = '-p' - - self.bank = '' - self.defaultAtsArgs = [ - "--continueFreq=15", - "--timelimit=120"] - self.atsArgs = [] - self.envArgs = [] - self.__dict__.update(options) - - def get_ats_args(self): - args_list = " ".join(str(x) for x in self.defaultAtsArgs + self.atsArgs) - args_list += f" --numNodes {self.numNodes} --allInteractive" - return args_list - - def get_launch_cmd(self): - launch_cmd = f"{self.allocCmd} {self.nodeCmd} {self.numNodes} {self.timeCmd} {self.allocTime}" - if (self.group): - launch_cmd += f" {self.groupCmd} {self.group}" - if (self.partition): - launch_cmd += f" {self.partitionCmd} {self.partition}" - return launch_cmd - - def get_num_procs(self): - return self.numNodes * self.procsPerNode - -class blueOS(MachineInfo): - def __init__ (self, **options): - "Must not throw an exception -- object must always get created." 
- super(MachineInfo, self).__init__() - self.name = '', - self.allocTime = 240 - self.timeCmd = '-W' - self.machineType = 'blueos_3_ppc64le_ib_p9' - self.numNodes = 4 - self.procsPerNode = 40 - self.gpusPerNode = 4 - self.allocCmd = '/usr/tcetmp/bin/lalloc' - self.nodeCmd = "" - self.group = 'guests' - self.groupCmd = '-G' - self.bank = 'guests' - self.partition = 'pdebug' - self.partitionCmd = '-q' - self.atsArgs = ["--smpi_off", - "--npMax=36", - "--glue='noDraco=True'", - "--glue='noVisit=True'", - "--glue='noOpacityServer=True'", - "--glue='noCxxUnitTesting=True'"] - self.envArgs = [] - self.__dict__.update(options) - -class toss4(MachineInfo): - def __init__ (self, **options): - super(MachineInfo, self).__init__() - self.name = '', - self.allocTime = 180 - self.machineType = 'slurm36' - self.numNodes = 2 - self.procsPerNode = 36 - self.gpusPerNode = 0 - self.allocCmd = 'salloc --exclusive' - self.nodeCmd = "-N" - self.group = '' - self.bank = 'wbronze' - self.partition = 'pdebug' - self.partitionCmd = '-p' - self.atsArgs = ["--npMax=40"] - self.envArgs = [] - self.__dict__.update(options) - -#--------------------------------------------------------------------------- -# MAIN -#--------------------------------------------------------------------------- - -#--------------------------------------------------------------------------- -# Setup machine info classes -#----------------------------------------------------------------------- - -lassenSettings = blueOS(name="lassen") -rzanselSettings = blueOS(name="rzansel") - -rubySettings = toss4(name="ruby", partition="") -rzgenieSettings = toss4(name="rzgenie") -rzwhippetSettings = toss4(name="rzwhippet", procsPerNode=112) - -allSettings = [lassenSettings, rzanselSettings, rubySettings, rzgenieSettings] -macNames = {x.name: x for x in allSettings} - -#--------------------------------------------------------------------------- -# Setup argument parser 
-#--------------------------------------------------------------------------- -parser = argparse.ArgumentParser() - -parser.add_argument( "--allocTime", type=int, metavar="minutes", dest="allocTime", - help = "The amount of time for the batch job (in minutes) .") - -parser.add_argument( "--machine", type=str, default=None, choices=list(macNames.keys()), - help="Sets the machine for ats.") - -parser.add_argument( "--numNodes", type=int, - help="Number of nodes to allocate for ats to run in.") - -parser.add_argument( "--partition", type=str, - help = "Partition in which to run jobs.") - -parser.add_argument( "--bank", type=str, - help = "Bank to use for batch job.") - -parser.add_argument("--wcid", type=str, - help = "HERT WC-ID to use for batch job.") - -parser.add_argument( "--sanitize", action="store_true", dest="sanitize", - help = "Run sanitize tests. NOTE These need a specific build to work. ") - -#--------------------------------------------------------------------------- -# other options -#--------------------------------------------------------------------------- -ezatsLocaltime = time.localtime() -ezatsStartTime = time.strftime("%y%m%d%H%M%S",ezatsLocaltime) -msubFilenameDefault= "tmpAts." + ezatsStartTime + ".job" -bsubFilenameDefault= "tmpAts." + ezatsStartTime + ".job" -parser.add_argument( "--msubFilename", type=str, - default = msubFilenameDefault, help = "The name of the generated ats msub job script that will be run.") - -parser.add_argument( "--bsubFilename", type=str, - default = bsubFilenameDefault, help = "The name of the generated ats bsub job script that will be run.") - -parser.add_argument( '--timelimit', dest='timelimit', default=30, - help='Set the default time limit on each test. The value may be given as a digit followed by an s, m, or h to give the time in seconds, minutes (the default), or hours.') - -# The P2 version is a sym-link to the latest python 2 version of ATS. 
There's a P3 when we're ready for Python3 -parser.add_argument( "--atsExe", type=str, default="/usr/apps/ats/7.0.P3/bin/ats", help="Sets which ats to use.") - -parser.add_argument( "--testpath", type=str, default="", - help="Specifies a path for ezats to use for unique test output.") - -# Pass through options -parser.add_argument("passthrough", nargs="*", - help="Anything beyond a blank -- is passed through to the ats call") - -options = parser.parse_args() - -#--------------------------------------------------------------------------- -# Determine machine settings to use -#----------------------------------------------------------------------- - -if options.machine: - machineSettings = macNames[options.machine] -elif LCSCHEDCLUSTER in macNames: - machineSettings = macNames[LCSCHEDCLUSTER] -else: - print("Could not determine machine settings to use.") - sys.exit(1) - -print("Selected machine settings for: ", machineSettings.name) - -#--------------------------------------------------------------------------- -# options affecting machine settings -#--------------------------------------------------------------------------- - -if options.allocTime: - machineSettings.allocTime = options.allocTime - -if options.numNodes: - machineSettings.numNodes = options.numNodes - -if options.partition: - machineSettings.partition = options.partition - -if options.bank: - machineSettings.bank = options.bank - -if options.wcid: - machineSettings.wcid = options.wcid - -atsArgs = " ".join(str(x) for x in options.passthrough) - -print("Note: the srun message 'error: ioctl(TIOCGWINSZ)' can be ignored. 
\n[It means the process is trying to do something that requires a tty \nbut it's not doing either a read or write.]\n") - -toAdd = "" -# Add glue arg to pass unique file system test path to ats -if (options.testpath): - toAdd= """ --glue='testpath=str("%s")' """ % options.testpath - -if options.sanitize: - toAdd += """ --filter="sanitize==1" """ - -#toAdd += " ".join(x for x in machineSettings.atsArgs if x not in atsArgs) - -AtsRunCmd = f"{options.atsExe} {toAdd} {atsArgs} {machineSettings.get_ats_args()}" - -os.environ["MACHINE_TYPE"] = machineSettings.machineType -os.environ["BATCH_TYPE"] = "None" - -finalCommandToRun = machineSettings.get_launch_cmd() + " " + AtsRunCmd -# Remove all extra spaces -finalCommandToRun = re.sub(r"\s+", " ", finalCommandToRun.strip()) - -print(f"Running command:\n {finalCommandToRun}") - -from subprocess import check_call -try: - check_call( finalCommandToRun,shell=True ) -except Exception as e: - print("Caught - non-zero exit status 3 - thrown by final command", e) From e351eb44b8b73668387999e4a82c6ae3fed736e7 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Thu, 14 Nov 2024 10:28:28 -0800 Subject: [PATCH 41/44] Fix bugs in performance.py --- scripts/spheral_ats.py | 2 +- src/SimulationControl/SpheralConfigs.py.in | 3 ++ tests/performance.py | 55 ++++++++++++++++------ 3 files changed, 44 insertions(+), 16 deletions(-) diff --git a/scripts/spheral_ats.py b/scripts/spheral_ats.py index e83509cb2..6e9aad8dd 100644 --- a/scripts/spheral_ats.py +++ b/scripts/spheral_ats.py @@ -113,7 +113,7 @@ def main(): #--------------------------------------------------------------------------- parser = argparse.ArgumentParser(allow_abbrev=False, usage=""" - ./spheral spheral_ats.py --numNodes 2 tests/integration.ats --filter="level<100" + ./spheral-ats --numNodes 2 tests/integration.ats --filter="level<100" """, description=""" Launches and runs Spheral using the ATS system. 
diff --git a/src/SimulationControl/SpheralConfigs.py.in b/src/SimulationControl/SpheralConfigs.py.in index 487fb9a8a..0f0c2e747 100644 --- a/src/SimulationControl/SpheralConfigs.py.in +++ b/src/SimulationControl/SpheralConfigs.py.in @@ -23,3 +23,6 @@ def caliper_module_path(): return os.path.join(caliper_loc, "lib64/caliper") else: return None + +def test_install_path(): + return "@SPHERAL_TEST_INSTALL_PREFIX@" diff --git a/tests/performance.py b/tests/performance.py index 0d4e48406..2b027f345 100644 --- a/tests/performance.py +++ b/tests/performance.py @@ -5,16 +5,42 @@ import sys, shutil, os, time import numpy as np -cur_dir = os.path.dirname(__file__) -spheral_path = os.path.join(cur_dir, "../lib/python3.9/site-packages/Spheral") +spheral_path = "../lib/python3.9/site-packages/Spheral" sys.path.append(spheral_path) import SpheralConfigs +# If desired, set a location to consolidate Caliper files, tthis is useful +# when running scaling tests +# This automatically creates directories based on the install configuration +# and test names inside output_loc +# WARNING: Be sure to remove older performance data in +# output location beforehand +#output_loc = "/home/user/scaling/test/files" +output_loc = None + # Current system architecture from Spack spheral_sys_arch = SpheralConfigs.sys_arch() # Current install configuration from Spack spheral_install_config = SpheralConfigs.config() +# Consolidate Caliper files after run +def gather_files(manager): + filtered = [test for test in manager.testlist if test.status is PASSED] + for test in filtered: + run_dir = test.directory + cali_filename = test.options["caliper_filename"] + cfile = os.path.join(run_dir, cali_filename) + test_name = test.options["label"] + outdir = os.path.join(output_loc, spheral_install_config, test_name) + if (not os.path.exists(outdir)): + log(f"Creating {outdir}") + os.mkdir(outdir) + outfile = os.path.join(outdir, cali_filename) + log(f"Copying {cali_filename} to {outdir}") + shutil.copy(cfile, 
outfile) + +if (output_loc): + onExit(gather_files) glue(keep=True) def add_timer_cmds(cali_name, test_name): @@ -30,14 +56,14 @@ def add_timer_cmds(cali_name, test_name): # NOH tests test_dir = os.path.join(SpheralConfigs.test_install_path(), "functional/Hydro/Noh") -# Select which timing regions to post-process +# Select which timing regions to compare (for CI) regions = ["CheapRK2", "CheapRK2PreInit", "ConnectivityMap_computeConnectivity", "ConnectivityMap_patch", "CheapRK2EvalDerivs", "CheapRK2EndStep"] -# Select which timers to use to post-process the regions above +# Select which timers to compare (for CI) timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks # General input for all Noh tests @@ -49,17 +75,17 @@ def add_timer_cmds(cali_name, test_name): test_file = "Noh-cylindrical-2d.py" nRadial = 100 test_path = os.path.join(test_dir, test_file) -test_name_base = "NC2D" +test_name = "NC2D" # Test with varying number of ranks ranks = [1, 2, 4] for i, n in enumerate(ranks): - test_name = f"{test_name_base}_{i}" - caliper_filename = f"{test_name}_{int(time.time())}.cali" + caliper_filename = f"{test_name}_{i}_{int(time.time())}.cali" timer_cmds = add_timer_cmds(caliper_filename, test_name) inps = f"{gen_noh_inps} --nRadial {nRadial} --steps 10 {timer_cmds}" ncores = int(num_nodes*num_cores/n) - t = test(script=test_path, clas=inps, label=f"{test_name}", + t = test(script=test_path, clas=inps, + label=test_name, np=ncores, caliper_filename=caliper_filename, regions=regions, @@ -71,23 +97,22 @@ def add_timer_cmds(cali_name, test_name): group(name="NOH 3D tests") test_file = "Noh-spherical-3d.py" test_path = os.path.join(test_dir, test_file) -test_name_base = "NS3D" +test_name = "NS3D" # Test with varying number of SPH nodes per rank npcore = [100, 200, 300] for i, n in enumerate(npcore): - test_name = f"{test_name_base}_{i}" - caliper_filename = f"{test_name}_{int(time.time())}.cali" - total_sph_nodes = n*num_cores + 
caliper_filename = f"{test_name}_{i}_{int(time.time())}.cali" + ncores = int(num_nodes*num_cores) + total_sph_nodes = n*ncores npd = int(np.cbrt(total_sph_nodes)) node_inps = f"--nx {npd} --ny {npd} --nz {npd}" timer_cmds = add_timer_cmds(caliper_filename, test_name) inps = f"{gen_noh_inps} {node_inps} --steps 3 {timer_cmds}" # WIP: Path to benchmark timing data - ncores = int(num_cores) - t = test(script=test_path, clas=inps, label=f"{test_name}", + t = test(script=test_path, clas=inps, + label=test_name, np=ncores, - independent=False, caliper_filename=caliper_filename, regions=regions, timers=timers, From 5d9ea270a12fd0e948e28378a638fc3d9f49c87a Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Fri, 15 Nov 2024 09:56:29 -0800 Subject: [PATCH 42/44] Added 3d periodic test, removed perftest option from spheral_ats.py --- scripts/spheral_ats.py | 4 +- tests/performance.py | 40 ++++- .../unit/Boundary/testPeriodicBoundary-3d.py | 164 ++++++++++++++++++ 3 files changed, 201 insertions(+), 7 deletions(-) create mode 100644 tests/unit/Boundary/testPeriodicBoundary-3d.py diff --git a/scripts/spheral_ats.py b/scripts/spheral_ats.py index 6e9aad8dd..a3da87e01 100644 --- a/scripts/spheral_ats.py +++ b/scripts/spheral_ats.py @@ -128,8 +128,6 @@ def main(): help="Time limit for allocation.") parser.add_argument("--ciRun", action="store_true", help="Option to only be used by the CI") - parser.add_argument("--perfTest", action="store_true", - help="Turn on if doing a performance test.") parser.add_argument("--atsHelp", action="store_true", help="Print the help output for ATS. 
Useful for seeing ATS options.") options, unknown_options = parser.parse_known_args() @@ -173,7 +171,7 @@ def main(): # Launch ATS #--------------------------------------------------------------------------- # If doing a CI run, set some more options - if (not options.perfTest): + if (options.ciRun): if ("--logs" not in unknown_options): ats_args.append(f"--logs {test_log_name}") log_name = test_log_name diff --git a/tests/performance.py b/tests/performance.py index 2b027f345..9f60704c3 100644 --- a/tests/performance.py +++ b/tests/performance.py @@ -53,9 +53,6 @@ def add_timer_cmds(cali_name, test_name): num_nodes = 2 num_cores = 36 -# NOH tests -test_dir = os.path.join(SpheralConfigs.test_install_path(), "functional/Hydro/Noh") - # Select which timing regions to compare (for CI) regions = ["CheapRK2", "CheapRK2PreInit", @@ -66,6 +63,41 @@ def add_timer_cmds(cali_name, test_name): # Select which timers to compare (for CI) timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks +# 3D convection test +test_dir = os.path.join(SpheralConfigs.test_install_path(), "unit/Boundary") + +group(name="3D Convection test") +test_file = "testPeriodicBoundary-3d.py" +test_path = os.path.join(test_dir, test_file) +test_name = "3DCONV" + +# Test with varying number of ranks +ranks = [1, 2, 4] +# We want 20 points per unit length +ref_len = 1. +sph_point_rho = 20. 
/ ref_len +sph_per_core = 300 +for i, n in enumerate(ranks): + caliper_filename = f"{test_name}_{i}_{int(time.time())}.cali" + timer_cmds = add_timer_cmds(caliper_filename, test_name) + ncores = int(num_nodes*num_cores/n) + total_sph_nodes = sph_per_core * ncores + npd = int(np.cbrt(total_sph_nodes)) + new_len = npd * ref_len / sph_point_rho + inps = f"--nx {npd} --ny {npd} --nz {npd} --x1 {new_len} --y1 {new_len} --z1 {new_len} --steps 100 {timer_cmds}" + t = test(script=test_path, clas=inps, + label=test_name, + np=ncores, + caliper_filename=caliper_filename, + regions=regions, + timers=timers, + install_config=spheral_install_config) + +endgroup() + +# NOH tests +test_dir = os.path.join(SpheralConfigs.test_install_path(), "functional/Hydro/Noh") + # General input for all Noh tests gen_noh_inps = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --xfilter 0.0 "+\ "--nPerh 2.01 --graphics False --clearDirectories False --doCompare False "+\ @@ -108,7 +140,7 @@ def add_timer_cmds(cali_name, test_name): npd = int(np.cbrt(total_sph_nodes)) node_inps = f"--nx {npd} --ny {npd} --nz {npd}" timer_cmds = add_timer_cmds(caliper_filename, test_name) - inps = f"{gen_noh_inps} {node_inps} --steps 3 {timer_cmds}" + inps = f"{gen_noh_inps} {node_inps} --steps 10 {timer_cmds}" # WIP: Path to benchmark timing data t = test(script=test_path, clas=inps, label=test_name, diff --git a/tests/unit/Boundary/testPeriodicBoundary-3d.py b/tests/unit/Boundary/testPeriodicBoundary-3d.py new file mode 100644 index 000000000..d26d2e33e --- /dev/null +++ b/tests/unit/Boundary/testPeriodicBoundary-3d.py @@ -0,0 +1,164 @@ +#ATS:t0 = test(SELF, "", np=10, label="Periodic boundary unit test -- 3-D (parallel)") +#------------------------------------------------------------------------------- +# 3D test of periodic boundaries -- we simply allow a pressureless fluid to +# cycle around a box and check the sum density +#------------------------------------------------------------------------------- +from 
math import * +from Spheral3d import * +from SpheralTestUtilities import * +from SpheralPointmeshSiloDump import dumpPhysicsState +import mpi + +title("3D periodic boundary test.") + +#------------------------------------------------------------------------------- +# Generic problem parameters +#------------------------------------------------------------------------------- +commandLine(nx = 20, + ny = 20, + nz = 20, + x0 = 0.0, + x1 = 1.0, + y0 = 0.0, + y1 = 1.0, + z0 = 0.0, + z1 = 1.0, + + rho1 = 1.0, + cs2 = 1.0, + mu = 1.0, + vx1 = 1.0, + vy1 = 1.0, + vz1 = 1.0, + + nPerh = 2.01, + + hmin = 0.0001, + hmax = 0.5, + cfl = 0.5, + + tol = 1.0e-3, + steps = 300, + dt = 0.0001, + dtMin = 1.0e-5, + dtMax = 0.1, + dtGrowth = 2.0, + dtverbose = False, + rigorousBoundaries = False, + maxSteps = None, + statsStep = 1, + smoothIters = 0, + HEvolution = IdealH, + densityUpdate = RigorousSumDensity, + compatibleEnergy = True, + gradhCorrection = True, + linearConsistent = False, + domainIndependent = False, + + restoreCycle = None, + restartStep = 10000, + restartBaseName = None + ) + +#------------------------------------------------------------------------------- +# Material properties. +#------------------------------------------------------------------------------- +eos = IsothermalEquationOfStateMKS(cs2, mu) + +#------------------------------------------------------------------------------- +# Interpolation kernels. +#------------------------------------------------------------------------------- +WT = TableKernel(BSplineKernel(), 1000) +WTPi = TableKernel(BSplineKernel(), 1000) + +#------------------------------------------------------------------------------- +# Make the NodeList. +#------------------------------------------------------------------------------- +nodes1 = makeFluidNodeList("nodes1", eos, + hmin = hmin, + hmax = hmax, + nPerh = nPerh) + +#------------------------------------------------------------------------------- +# Set the node properties. 
+#------------------------------------------------------------------------------- +from GenerateNodeDistribution3d import GenerateNodeDistribution3d +gen1 = GenerateNodeDistribution3d(nx, ny, nz, + rho = rho1, + distributionType = "lattice", + xmin = (x0, y0, z0), + xmax = (x1, y1, z1), + nNodePerh = nPerh, + SPH = True) +if mpi.procs > 1: + from PeanoHilbertDistributeNodes import distributeNodes3d +else: + from DistributeNodes import distributeNodes3d +distributeNodes3d((nodes1, gen1)) + +# Set the node positions, velocities, and densities. +nodes1.velocity(VectorField("tmp velocity", nodes1, Vector(vx1, vy1, vz1))) + +#------------------------------------------------------------------------------- +# Construct a DataBase to hold our node list +#------------------------------------------------------------------------------- +db = DataBase() +db.appendNodeList(nodes1) + +#------------------------------------------------------------------------------- +# Construct the artificial viscosity. +#------------------------------------------------------------------------------- +q = MonaghanGingoldViscosity(0.0, 0.0) + +#------------------------------------------------------------------------------- +# Construct the hydro physics object. +#------------------------------------------------------------------------------- +hydro = SPH(dataBase = db, + W = WT, + Q = q, + cfl = cfl, + densityUpdate = RigorousSumDensity, + HUpdate = HEvolution) + +#------------------------------------------------------------------------------- +# Create boundary conditions. +#------------------------------------------------------------------------------- +loVect = Vector(x0, y0, z0) +hiVect = Vector(x1, y1, z1) +bcs = [] +for i in range(0,3): + nVect = Vector(0., 0., 0.) + nVect[i] = 1. 
+    plane0 = Plane(loVect, nVect)
+    plane1 = Plane(hiVect, -nVect)
+    bcs.append(PeriodicBoundary(plane0, plane1))
+# Segfault occurs if hydro is appended directly in the previous loop
+for i in bcs:
+    hydro.appendBoundary(i)
+
+#-------------------------------------------------------------------------------
+# Construct a time integrator.
+#-------------------------------------------------------------------------------
+integrator = CheapSynchronousRK2Integrator(db)
+integrator.appendPhysicsPackage(hydro)
+integrator.lastDt = dt
+integrator.dtMin = dtMin
+integrator.dtMax = dtMax
+integrator.dtGrowth = dtGrowth
+integrator.rigorousBoundaries = rigorousBoundaries
+integrator.domainDecompositionIndependent = domainIndependent
+integrator.verbose = dtverbose
+
+#-------------------------------------------------------------------------------
+# Make the problem controller.
+#-------------------------------------------------------------------------------
+control = SpheralController(integrator, WT,
+                            statsStep = statsStep,
+                            restartStep = restartStep,
+                            restartBaseName = restartBaseName,
+                            restoreCycle = restoreCycle)
+
+#-------------------------------------------------------------------------------
+# Advance to the end time.
+#------------------------------------------------------------------------------- +control.step(steps) From f96601319a46e9237e35a0e23f9ae6d4e741b25d Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Mon, 18 Nov 2024 11:19:07 -0800 Subject: [PATCH 43/44] Fix bug where default adiak data was incorrect if mpi module had not been loaded --- src/SimulationControl/Spheral.py | 10 +++++----- src/SimulationControl/SpheralTestUtilities.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/src/SimulationControl/Spheral.py b/src/SimulationControl/Spheral.py index 8f5bf2255..4ba881f3c 100644 --- a/src/SimulationControl/Spheral.py +++ b/src/SimulationControl/Spheral.py @@ -3,6 +3,11 @@ # Modified version to be compatible with the pybindgen version of Spheral++. +# ------------------------------------------------------------------------------ +# Load up MPI. +# ------------------------------------------------------------------------------ +import mpi + from SpheralUtilities import BuildData if not BuildData.cxx_compiler_id == "GNU": @@ -13,11 +18,6 @@ print("WARNING: unable to set python dl flags on Spheral import.") pass -# ------------------------------------------------------------------------------ -# Load up MPI. -# ------------------------------------------------------------------------------ -import mpi - # ------------------------------------------------------------------------------ # Import a scipy module to initialize scipy's shared qhull library before # spheral's static qhull library. diff --git a/src/SimulationControl/SpheralTestUtilities.py b/src/SimulationControl/SpheralTestUtilities.py index c0562ec8a..eecd80767 100644 --- a/src/SimulationControl/SpheralTestUtilities.py +++ b/src/SimulationControl/SpheralTestUtilities.py @@ -1,5 +1,6 @@ # SpheralTestUtilities -- small helper functions used in Spheral unit tests. 
+import mpi import sys from math import * from collections import Iterable @@ -132,7 +133,6 @@ def checkNeighbors(neighborList, answer): # Print statistic about the H tensors for a set of NodeLists. #------------------------------------------------------------------------------- def hstats(nodeSet): - import mpi for nodes in nodeSet: hmin, hmax, havg = 1e50, -1e50, 0.0 hratmin, hratmax, hratavg = 1e50, -1e50, 0.0 From 1673bb2ed66f73639fbb91d081f742505a0109a2 Mon Sep 17 00:00:00 2001 From: Landon Owen Date: Fri, 22 Nov 2024 14:01:54 -0800 Subject: [PATCH 44/44] Cleaned up performance.py, reverted some changes to gitlab/scripts.yaml and install-from-dev-pkg, fixed recursive module bug from last commit --- .gitlab/scripts.yml | 8 +- scripts/gitlab/performance_analysis.py | 2 +- scripts/lc/install-from-dev-pkg.sh | 8 +- src/SimulationControl/SpheralTestUtilities.py | 2 +- tests/performance.py | 206 +++++++++--------- 5 files changed, 108 insertions(+), 118 deletions(-) diff --git a/.gitlab/scripts.yml b/.gitlab/scripts.yml index 9fbaa4103..14e48a1f4 100644 --- a/.gitlab/scripts.yml +++ b/.gitlab/scripts.yml @@ -84,10 +84,8 @@ .update_tpls: stage: update_tpls - variables: - GIT_STRATEGY: none script: - - ./$SCRIPT_DIR/devtools/tpl-manager.py --spec-list="$SCRIPT_DIR/devtools/spec-list.json" --spheral-spack-dir=$UPSTREAM_DIR + - ./$SCRIPT_DIR/devtools/tpl-manager.py --no-upstream --spec-list="$SCRIPT_DIR/devtools/spec-list.json" --spheral-spack-dir=$UPSTREAM_DIR .toss_update_permissions: stage: update_permissions @@ -114,8 +112,6 @@ .build_dev_pkg: stage: generate_buildcache - variables: - GIT_STRATEGY: none extends: [.spheral_rev_str] script: - INSTALL_DIR=/usr/gapps/Spheral/$SYS_TYPE/spheral-$SPHERAL_REV_STR @@ -145,7 +141,7 @@ - tar -xzf $DEV_PKG_NAME.tar.gz - cd $DEV_PKG_NAME - - env INSTALL_DIR=$INSTALL_DIR SPEC=$SPEC SPACK_PKG_NAME=$SPACK_PKG_NAME SCRIPT_DIR=$SCRIPT_DIR + - env INSTALL_DIR=$INSTALL_DIR SPEC=$SPEC SPACK_PKG_NAME=$SPACK_PKG_NAME BUILD_ALLOC="" 
SCRIPT_DIR=$SCRIPT_DIR bash ./$SCRIPT_DIR/lc/install-from-dev-pkg.sh artifacts: diff --git a/scripts/gitlab/performance_analysis.py b/scripts/gitlab/performance_analysis.py index 8f70cadd5..49a401b78 100644 --- a/scripts/gitlab/performance_analysis.py +++ b/scripts/gitlab/performance_analysis.py @@ -67,7 +67,7 @@ def main(): # Get historical timing data cali_ref_dir = os.path.join(benchmark_dir, install_config, test_name) if (not os.path.exists(cali_ref_dir)): - os.mkdir(cali_ref_dir) + os.makedirs(cali_ref_dir) shutils.copyfile(cfile, os.path.join(cali_ref_dir, cali_file)) if __name__=="__main__": diff --git a/scripts/lc/install-from-dev-pkg.sh b/scripts/lc/install-from-dev-pkg.sh index d109da586..b4b6a1c6f 100644 --- a/scripts/lc/install-from-dev-pkg.sh +++ b/scripts/lc/install-from-dev-pkg.sh @@ -2,6 +2,7 @@ trap 'echo "# $BASH_COMMAND"' DEBUG SPACK_PKG_NAME=${SPACK_PKG_NAME:-'spheral'} SPACK_URL=${SPACK_URL:-'https://github.com/spack/spack'} +BUILD_ALLOC=${BUILD_ALLOC} SCRIPT_DIR=${SCRIPT_DIR:-'scripts'} if [[ -z "${SPEC}" ]]; then @@ -19,6 +20,7 @@ echo $SPEC echo $SPACK_URL echo $INSTALL_DIR echo $SCRIPT_DIR +echo $BUILD_ALLOC rm -rf $INSTALL_DIR mkdir -p $INSTALL_DIR @@ -34,12 +36,12 @@ spack mirror add --unsigned spheral-mirror $PWD/resources/mirror spack mirror add --unsigned spheral-cache $PWD/resources spack buildcache update-index $PWD/resources/mirror -spack install --fresh --deprecated --no-check-signature --only dependencies $SPACK_PKG_NAME@develop%$SPEC +$BUILD_ALLOC spack install --fresh --deprecated --no-check-signature --only dependencies $SPACK_PKG_NAME@develop%$SPEC -./$SCRIPT_DIR/devtools/tpl-manager.py --spack-url $SPACK_URL --no-upstream --spheral-spack-dir $INSTALL_DIR/spheral-spack-tpls --spec $SPEC +$BUILD_ALLOC ./$SCRIPT_DIR/devtools/tpl-manager.py --spack-url $SPACK_URL --no-upstream --spheral-spack-dir $INSTALL_DIR/spheral-spack-tpls --spec $SPEC HOST_CONFIG_FILE=$(ls -t | grep -E "*\.cmake" | head -1) 
-./$SCRIPT_DIR/devtools/host-config-build.py --host-config $HOST_CONFIG_FILE -i $INSTALL_DIR --build --no-clean +$BUILD_ALLOC ./$SCRIPT_DIR/devtools/host-config-build.py --host-config $HOST_CONFIG_FILE -i $INSTALL_DIR --build --no-clean diff --git a/src/SimulationControl/SpheralTestUtilities.py b/src/SimulationControl/SpheralTestUtilities.py index eecd80767..c0562ec8a 100644 --- a/src/SimulationControl/SpheralTestUtilities.py +++ b/src/SimulationControl/SpheralTestUtilities.py @@ -1,6 +1,5 @@ # SpheralTestUtilities -- small helper functions used in Spheral unit tests. -import mpi import sys from math import * from collections import Iterable @@ -133,6 +132,7 @@ def checkNeighbors(neighborList, answer): # Print statistic about the H tensors for a set of NodeLists. #------------------------------------------------------------------------------- def hstats(nodeSet): + import mpi for nodes in nodeSet: hmin, hmax, havg = 1e50, -1e50, 0.0 hratmin, hratmax, hratavg = 1e50, -1e50, 0.0 diff --git a/tests/performance.py b/tests/performance.py index 9f60704c3..66858a5d4 100644 --- a/tests/performance.py +++ b/tests/performance.py @@ -23,6 +23,9 @@ # Current install configuration from Spack spheral_install_config = SpheralConfigs.config() +def add_timer_cmds(cali_name, test_name): + return f"--caliperFilename {cali_name} --adiakData 'test_name: {test_name}, install_config: {spheral_install_config}'" + # Consolidate Caliper files after run def gather_files(manager): filtered = [test for test in manager.testlist if test.status is PASSED] @@ -34,120 +37,109 @@ def gather_files(manager): outdir = os.path.join(output_loc, spheral_install_config, test_name) if (not os.path.exists(outdir)): log(f"Creating {outdir}") - os.mkdir(outdir) + os.makedirs(outdir) outfile = os.path.join(outdir, cali_filename) log(f"Copying {cali_filename} to {outdir}") shutil.copy(cfile, outfile) - -if (output_loc): - onExit(gather_files) -glue(keep=True) - -def add_timer_cmds(cali_name, test_name): - return 
f"--caliperFilename {cali_name} --adiakData 'test_name: {test_name}, install_config: {spheral_install_config}'" - -if ("power" in spheral_sys_arch): - num_nodes = 1 - num_cores = 40 -elif ("broadwell" in spheral_sys_arch): - num_nodes = 2 - num_cores = 36 - -# Select which timing regions to compare (for CI) -regions = ["CheapRK2", - "CheapRK2PreInit", - "ConnectivityMap_computeConnectivity", - "ConnectivityMap_patch", - "CheapRK2EvalDerivs", - "CheapRK2EndStep"] -# Select which timers to compare (for CI) -timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks - -# 3D convection test -test_dir = os.path.join(SpheralConfigs.test_install_path(), "unit/Boundary") - -group(name="3D Convection test") -test_file = "testPeriodicBoundary-3d.py" -test_path = os.path.join(test_dir, test_file) -test_name = "3DCONV" - -# Test with varying number of ranks -ranks = [1, 2, 4] -# We want 20 points per unit length -ref_len = 1. -sph_point_rho = 20. / ref_len -sph_per_core = 300 -for i, n in enumerate(ranks): - caliper_filename = f"{test_name}_{i}_{int(time.time())}.cali" - timer_cmds = add_timer_cmds(caliper_filename, test_name) - ncores = int(num_nodes*num_cores/n) - total_sph_nodes = sph_per_core * ncores - npd = int(np.cbrt(total_sph_nodes)) - new_len = npd * ref_len / sph_point_rho - inps = f"--nx {npd} --ny {npd} --nz {npd} --x1 {new_len} --y1 {new_len} --z1 {new_len} --steps 100 {timer_cmds}" - t = test(script=test_path, clas=inps, - label=test_name, - np=ncores, - caliper_filename=caliper_filename, - regions=regions, - timers=timers, - install_config=spheral_install_config) - -endgroup() - -# NOH tests -test_dir = os.path.join(SpheralConfigs.test_install_path(), "functional/Hydro/Noh") - -# General input for all Noh tests -gen_noh_inps = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --xfilter 0.0 "+\ - "--nPerh 2.01 --graphics False --clearDirectories False --doCompare False "+\ - "--dataDir None --vizTime None --vizCycle None" - 
-group(name="NOH 2D tests") -test_file = "Noh-cylindrical-2d.py" -nRadial = 100 -test_path = os.path.join(test_dir, test_file) -test_name = "NC2D" - -# Test with varying number of ranks -ranks = [1, 2, 4] -for i, n in enumerate(ranks): - caliper_filename = f"{test_name}_{i}_{int(time.time())}.cali" - timer_cmds = add_timer_cmds(caliper_filename, test_name) - inps = f"{gen_noh_inps} --nRadial {nRadial} --steps 10 {timer_cmds}" - ncores = int(num_nodes*num_cores/n) - t = test(script=test_path, clas=inps, - label=test_name, - np=ncores, - caliper_filename=caliper_filename, - regions=regions, - timers=timers, - install_config=spheral_install_config) - -endgroup() - -group(name="NOH 3D tests") -test_file = "Noh-spherical-3d.py" -test_path = os.path.join(test_dir, test_file) -test_name = "NS3D" - -# Test with varying number of SPH nodes per rank -npcore = [100, 200, 300] -for i, n in enumerate(npcore): - caliper_filename = f"{test_name}_{i}_{int(time.time())}.cali" - ncores = int(num_nodes*num_cores) - total_sph_nodes = n*ncores - npd = int(np.cbrt(total_sph_nodes)) - node_inps = f"--nx {npd} --ny {npd} --nz {npd}" +# Setup Spheral performance tests +def spheral_setup_test(test_path, test_name, test_num, inps, ncores, threads=1): + 'General method for creating an individual performance test' + global regions, timers, spheral_install_config + caliper_filename = f"{test_name}_{test_num}_{int(time.time())}.cali" timer_cmds = add_timer_cmds(caliper_filename, test_name) - inps = f"{gen_noh_inps} {node_inps} --steps 10 {timer_cmds}" - # WIP: Path to benchmark timing data - t = test(script=test_path, clas=inps, + finps = f"{inps} {timer_cmds}" + t = test(script=test_path, clas=finps, label=test_name, np=ncores, + nt=threads, caliper_filename=caliper_filename, regions=regions, timers=timers, install_config=spheral_install_config) -# Add a wait to ensure all timer files are done -wait() + return t + +def main(): + if (output_loc): + onExit(gather_files) + glue(keep=True) + if 
("power" in spheral_sys_arch): + num_nodes = 1 + num_cores = 40 + elif ("broadwell" in spheral_sys_arch): + num_nodes = 2 + num_cores = 36 + # Select which timing regions to compare (for CI) + regions = ["CheapRK2", + "CheapRK2PreInit", + "ConnectivityMap_computeConnectivity", + "ConnectivityMap_patch", + "CheapRK2EvalDerivs", + "CheapRK2EndStep"] + # Select which timers to compare (for CI) + timers = ["sum#inclusive#sum#time.duration"] # Means the sum of the time from all ranks + + # 3D convection test + test_dir = os.path.join(SpheralConfigs.test_install_path(), "unit/Boundary") + + group(name="3D Convection test") + test_file = "testPeriodicBoundary-3d.py" + test_path = os.path.join(test_dir, test_file) + test_name = "3DCONV" + + # Test with varying number of ranks + ranks = [1, 2, 4] + # We want 20 points per unit length + ref_len = 1. + sph_point_rho = 20. / ref_len + sph_per_core = 300 + for i, n in enumerate(ranks): + ncores = int(num_nodes*num_cores/n) + total_sph_nodes = sph_per_core * ncores + npd = int(np.cbrt(total_sph_nodes)) + new_len = npd * ref_len / sph_point_rho + inps = f"--nx {npd} --ny {npd} --nz {npd} --x1 {new_len} --y1 {new_len} --z1 {new_len} --steps 100" + t = spheral_setup_test(test_path, test_name, i, inps, ncores) + endgroup() + + # NOH tests + test_dir = os.path.join(SpheralConfigs.test_install_path(), "functional/Hydro/Noh") + + # General input for all Noh tests + gen_noh_inps = "--crksph False --cfl 0.25 --Cl 1.0 --Cq 1.0 --xfilter 0.0 "+\ + "--nPerh 2.01 --graphics False --clearDirectories False --doCompare False "+\ + "--dataDir None --vizTime None --vizCycle None" + + group(name="NOH 2D tests") + test_file = "Noh-cylindrical-2d.py" + nRadial = 100 + test_path = os.path.join(test_dir, test_file) + test_name = "NC2D" + + # Test with varying number of ranks + ranks = [1, 2, 4] + for i, n in enumerate(ranks): + inps = f"{gen_noh_inps} --nRadial {nRadial} --steps 10" + ncores = int(num_nodes*num_cores/n) + t = 
spheral_setup_test(test_path, test_name, i, inps, ncores) + + endgroup() + + group(name="NOH 3D tests") + test_file = "Noh-spherical-3d.py" + test_path = os.path.join(test_dir, test_file) + test_name = "NS3D" + + # Test with varying number of SPH nodes per rank + npcore = [100, 200, 300] + for i, n in enumerate(npcore): + ncores = int(num_nodes*num_cores) + total_sph_nodes = n*ncores + npd = int(np.cbrt(total_sph_nodes)) + node_inps = f"--nx {npd} --ny {npd} --nz {npd}" + inps = f"{gen_noh_inps} {node_inps} --steps 10" + t = spheral_setup_test(test_path, test_name, i, inps, ncores) + # Add a wait to ensure all timer files are done + wait() + +if __name__=="__main__": + main()