diff --git a/CIME/SystemTests/system_tests_common.py b/CIME/SystemTests/system_tests_common.py
index a9c61b28c24..568f3b3647d 100644
--- a/CIME/SystemTests/system_tests_common.py
+++ b/CIME/SystemTests/system_tests_common.py
@@ -28,7 +28,7 @@
 from CIME.locked_files import LOCKED_DIR, lock_file, is_locked
 from CIME.baselines.performance import (
     get_latest_cpl_logs,
-    _perf_get_memory,
+    perf_get_memory_list,
     perf_compare_memory_baseline,
     perf_compare_throughput_baseline,
     perf_write_baseline,
@@ -806,7 +806,7 @@ def perf_check_for_memory_leak(case, tolerance):
 
     for cpllog in latestcpllogs:
         try:
-            memlist = _perf_get_memory(case, cpllog)
+            memlist = perf_get_memory_list(case, cpllog)
         except RuntimeError:
             return False, "insufficient data for memleak test"
 
diff --git a/CIME/baselines/performance.py b/CIME/baselines/performance.py
index 98bb170436d..ad7895f3eff 100644
--- a/CIME/baselines/performance.py
+++ b/CIME/baselines/performance.py
@@ -121,25 +121,25 @@ def perf_write_baseline(case, basegen_dir, throughput=True, memory=True):
 
     if throughput:
         try:
-            tput = perf_get_throughput(case, config)
+            tput, mode = perf_get_throughput(case, config)
         except RuntimeError as e:
             logger.debug("Could not get throughput: {0!s}".format(e))
         else:
             baseline_file = os.path.join(basegen_dir, "cpl-tput.log")
 
-            write_baseline_file(baseline_file, tput)
+            write_baseline_file(baseline_file, tput, mode)
 
             logger.info("Updated throughput baseline to {!s}".format(tput))
 
     if memory:
         try:
-            mem = perf_get_memory(case, config)
+            mem, mode = perf_get_memory(case, config)
         except RuntimeError as e:
             logger.info("Could not get memory usage: {0!s}".format(e))
         else:
             baseline_file = os.path.join(basegen_dir, "cpl-mem.log")
 
-            write_baseline_file(baseline_file, mem)
+            write_baseline_file(baseline_file, mem, mode)
 
             logger.info("Updated memory usage baseline to {!s}".format(mem))
 
@@ -184,16 +184,11 @@ def perf_get_throughput(case, config):
         Model throughput.
     """
     try:
-        tput = config.perf_get_throughput(case)
+        tput, mode = config.perf_get_throughput(case)
     except AttributeError:
-        tput = _perf_get_throughput(case)
+        tput, mode = _perf_get_throughput(case)
 
-    if tput is None:
-        raise RuntimeError("Could not get default throughput") from None
-
-    tput = str(tput)
-
-    return tput
+    return tput, mode
 
 
 def perf_get_memory(case, config):
@@ -215,19 +210,14 @@ def perf_get_memory(case, config):
         Model memory usage.
     """
     try:
-        mem = config.perf_get_memory(case)
+        mem, mode = config.perf_get_memory(case)
     except AttributeError:
-        mem = _perf_get_memory(case)
-
-        if mem is None:
-            raise RuntimeError("Could not get default memory usage") from None
-
-        mem = str(mem[-1][1])
+        mem, mode = _perf_get_memory(case)
 
-    return mem
+    return mem, mode
 
 
-def write_baseline_file(baseline_file, value):
+def write_baseline_file(baseline_file, value, mode="a"):
     """
     Writes value to `baseline_file`.
 
@@ -237,13 +227,11 @@ def write_baseline_file(baseline_file, value):
         Path to the baseline file.
     value : str
         Value to write.
+    mode : str
+        Mode to open file with.
     """
-    commit_hash = get_current_commit(repo=get_src_root())
-
-    timestamp = get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S")
-
-    with open(baseline_file, "a") as fd:
-        fd.write(f"sha:{commit_hash} date:{timestamp} {value}\n")
+    with open(baseline_file, mode) as fd:
+        fd.write(value)
 
 
 def _perf_get_memory(case, cpllog=None):
@@ -269,6 +257,17 @@ def _perf_get_memory(case, cpllog=None):
     RuntimeError
         If not enough sample were found.
""" + memlist = perf_get_memory_list(case, cpllog) + + if memlist is None: + raise RuntimeError("Could not get default memory usage") from None + + value = _format_baseline(memlist[-1][1]) + + return value, "a" + + +def perf_get_memory_list(case, cpllog): if cpllog is None: cpllog = get_latest_cpl_logs(case) else: @@ -316,7 +315,12 @@ def _perf_get_throughput(case): logger.debug("Could not parse throughput from coupler log") - return tput + if tput is None: + raise RuntimeError("Could not get default throughput") from None + + value = _format_baseline(tput) + + return value, "a" def get_latest_cpl_logs(case): @@ -428,11 +432,9 @@ def read_baseline_file(baseline_file): Value stored in baseline file without comments. """ with open(baseline_file) as fd: - lines = [ - x.strip().split(" ")[-1] for x in fd.readlines() if not x.startswith("#") - ] + lines = [x.strip() for x in fd.readlines() if not x.startswith("#") and x != ""] - return lines[-1] + return "\n".join(lines) def _perf_compare_throughput_baseline(case, baseline, tolerance): @@ -457,13 +459,20 @@ def _perf_compare_throughput_baseline(case, baseline, tolerance): comment : str provides explanation from comparison. """ - current = _perf_get_throughput(case) + current, _ = _perf_get_throughput(case) + + try: + current = float(_parse_baseline(current)) + except (ValueError, TypeError): + comment = "Could not compare throughput to baseline, as baseline had no value." + + return None, comment try: # default baseline is stored as single float - baseline = float(baseline) - except ValueError: - comment = "Could not compare throughput to baseline, as basline had no value." + baseline = float(_parse_baseline(baseline)) + except (ValueError, TypeError): + comment = "Could not compare throughput to baseline, as baseline had no value." return None, comment @@ -509,16 +518,21 @@ def _perf_compare_memory_baseline(case, baseline, tolerance): provides explanation from comparison. """ try: - current = _perf_get_memory(case) + current, _ = _perf_get_memory(case) except RuntimeError as e: return None, str(e) - else: - current = current[-1][1] + + try: + current = float(_parse_baseline(current)) + except (ValueError, TypeError): + comment = "Could not compare throughput to baseline, as baseline had no value." + + return None, comment try: # default baseline is stored as single float - baseline = float(baseline) - except ValueError: + baseline = float(_parse_baseline(baseline)) + except (ValueError, TypeError): baseline = 0.0 try: @@ -542,3 +556,55 @@ def _perf_compare_memory_baseline(case, baseline, tolerance): comment = "Error: MEMCOMP: " + info return below_tolerance, comment + + +def _format_baseline(value): + """ + Encodes value with default baseline format. + + Default format: + sha: date: + + Parameters + ---------- + value : str + Baseline value to encode. + + Returns + ------- + value : str + Baseline entry. + """ + commit_hash = get_current_commit(repo=get_src_root()) + + timestamp = get_timestamp(timestamp_format="%Y-%m-%d_%H:%M:%S") + + return f"sha:{commit_hash} date:{timestamp} {value}\n" + + +def _parse_baseline(data): + """ + Parses default baseline format. + + Default format: + sha: date: + + Parameters + ---------- + data : str + Containing contents of baseline file. + + Returns + ------- + value : str + Value of the latest blessed baseline. 
+ """ + lines = data.split("\n") + lines = [x for x in lines if x != ""] + + try: + value = lines[-1].strip().split(" ")[-1] + except IndexError: + value = None + + return value diff --git a/CIME/tests/test_unit_baselines_performance.py b/CIME/tests/test_unit_baselines_performance.py index 02ed39dc688..1564541ba9a 100644 --- a/CIME/tests/test_unit_baselines_performance.py +++ b/CIME/tests/test_unit_baselines_performance.py @@ -28,22 +28,9 @@ def create_mock_case(tempdir, get_latest_cpl_logs=None): class TestUnitBaselinesPerformance(unittest.TestCase): - @mock.patch("CIME.baselines.performance._perf_get_memory") - def test_perf_get_memory_default_no_value(self, _perf_get_memory): - _perf_get_memory.return_value = None - - case = mock.MagicMock() - - config = mock.MagicMock() - - config.perf_get_memory.side_effect = AttributeError - - with self.assertRaises(RuntimeError): - performance.perf_get_memory(case, config) - @mock.patch("CIME.baselines.performance._perf_get_memory") def test_perf_get_memory_default(self, _perf_get_memory): - _perf_get_memory.return_value = [(1, 1000)] + _perf_get_memory.return_value = ("1000", "a") case = mock.MagicMock() @@ -53,35 +40,22 @@ def test_perf_get_memory_default(self, _perf_get_memory): mem = performance.perf_get_memory(case, config) - assert mem == "1000" + assert mem == ("1000", "a") def test_perf_get_memory(self): case = mock.MagicMock() config = mock.MagicMock() - config.perf_get_memory.return_value = "1000" + config.perf_get_memory.return_value = ("1000", "a") mem = performance.perf_get_memory(case, config) - assert mem == "1000" - - @mock.patch("CIME.baselines.performance._perf_get_throughput") - def test_perf_get_throughput_default_no_value(self, _perf_get_throughput): - _perf_get_throughput.return_value = None - - case = mock.MagicMock() - - config = mock.MagicMock() - - config.perf_get_throughput.side_effect = AttributeError - - with self.assertRaises(RuntimeError): - performance.perf_get_throughput(case, config) + assert mem == ("1000", "a") @mock.patch("CIME.baselines.performance._perf_get_throughput") def test_perf_get_throughput_default(self, _perf_get_throughput): - _perf_get_throughput.return_value = 100 + _perf_get_throughput.return_value = ("100", "a") case = mock.MagicMock() @@ -91,18 +65,18 @@ def test_perf_get_throughput_default(self, _perf_get_throughput): tput = performance.perf_get_throughput(case, config) - assert tput == "100" + assert tput == ("100", "a") def test_perf_get_throughput(self): case = mock.MagicMock() config = mock.MagicMock() - config.perf_get_throughput.return_value = "100" + config.perf_get_throughput.return_value = ("100", "a") tput = performance.perf_get_throughput(case, config) - assert tput == "100" + assert tput == ("100", "a") def test_get_cpl_throughput_no_file(self): throughput = performance.get_cpl_throughput("/tmp/cpl.log") @@ -162,7 +136,7 @@ def test_read_baseline_file_multi_line(self): baseline = performance.read_baseline_file("/tmp/cpl-mem.log") mock_file.assert_called_with("/tmp/cpl-mem.log") - assert baseline == "2000.0" + assert baseline == "sha:1df0 date:2023 1000.0\nsha:3b05 date:2023 2000.0" def test_read_baseline_file_content(self): with mock.patch( @@ -171,7 +145,7 @@ def test_read_baseline_file_content(self): baseline = performance.read_baseline_file("/tmp/cpl-mem.log") mock_file.assert_called_with("/tmp/cpl-mem.log") - assert baseline == "1000.0" + assert baseline == "sha:1df0 date:2023 1000.0" def test_write_baseline_file(self): with mock.patch("builtins.open", mock.mock_open()) as 
@@ -187,9 +161,8 @@ def test__perf_get_throughput(self, get_latest_cpl_logs, get_cpl_throughput):
 
         with tempfile.TemporaryDirectory() as tempdir:
             case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs)
 
-            tput = performance._perf_get_throughput(case)
-
-            assert tput == None
+            with self.assertRaises(RuntimeError):
+                performance._perf_get_throughput(case)
 
     @mock.patch("CIME.baselines.performance.get_cpl_mem_usage")
     @mock.patch("CIME.baselines.performance.get_latest_cpl_logs")
@@ -199,9 +172,8 @@ def test__perf_get_memory_override(self, get_latest_cpl_logs, get_cpl_mem_usage)
 
         with tempfile.TemporaryDirectory() as tempdir:
             case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs)
 
-            mem = performance._perf_get_memory(case, "/tmp/override")
-
-            assert mem == None
+            with self.assertRaises(RuntimeError):
+                performance._perf_get_memory(case, "/tmp/override")
 
     @mock.patch("CIME.baselines.performance.get_cpl_mem_usage")
     @mock.patch("CIME.baselines.performance.get_latest_cpl_logs")
@@ -211,9 +183,8 @@ def test__perf_get_memory(self, get_latest_cpl_logs, get_cpl_mem_usage):
 
         with tempfile.TemporaryDirectory() as tempdir:
             case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs)
 
-            mem = performance._perf_get_memory(case)
-
-            assert mem == None
+            with self.assertRaises(RuntimeError):
+                performance._perf_get_memory(case)
 
     @mock.patch("CIME.baselines.performance.write_baseline_file")
     @mock.patch("CIME.baselines.performance.perf_get_memory")
@@ -264,9 +235,9 @@ def test_write_baseline_runtimeerror(
     def test_perf_write_baseline(
         self, perf_get_throughput, perf_get_memory, write_baseline_file
     ):
-        perf_get_throughput.return_value = "100"
+        perf_get_throughput.return_value = ("100", "a")
 
-        perf_get_memory.return_value = "1000"
+        perf_get_memory.return_value = ("1000", "a")
 
         with tempfile.TemporaryDirectory() as tempdir:
             case, _, _, baseline_root = create_mock_case(tempdir)
@@ -275,8 +246,12 @@ def test_perf_write_baseline(
         perf_get_throughput.assert_called()
         perf_get_memory.assert_called()
 
-        write_baseline_file.assert_any_call(str(baseline_root / "cpl-tput.log"), "100")
-        write_baseline_file.assert_any_call(str(baseline_root / "cpl-mem.log"), "1000")
+        write_baseline_file.assert_any_call(
+            str(baseline_root / "cpl-tput.log"), "100", "a"
+        )
+        write_baseline_file.assert_any_call(
+            str(baseline_root / "cpl-mem.log"), "1000", "a"
+        )
 
     @mock.patch("CIME.baselines.performance._perf_get_throughput")
     @mock.patch("CIME.baselines.performance.read_baseline_file")
@@ -309,7 +284,7 @@ def test_perf_compare_throughput_baseline_no_baseline(
     ):
         read_baseline_file.return_value = ""
 
-        _perf_get_throughput.return_value = 504
+        _perf_get_throughput.return_value = ("504", "a")
 
         with tempfile.TemporaryDirectory() as tempdir:
             case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs)
@@ -330,7 +305,7 @@ def test_perf_compare_throughput_baseline_no_baseline(
         assert below_tolerance is None
         assert (
            comment
-            == "Could not compare throughput to baseline, as basline had no value."
+            == "Could not compare throughput to baseline, as baseline had no value."
) @mock.patch("CIME.baselines.performance._perf_get_throughput") @@ -341,7 +316,7 @@ def test_perf_compare_throughput_baseline_no_tolerance( ): read_baseline_file.return_value = "500" - _perf_get_throughput.return_value = 504 + _perf_get_throughput.return_value = ("504", "a") with tempfile.TemporaryDirectory() as tempdir: case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs) @@ -373,7 +348,7 @@ def test_perf_compare_throughput_baseline_above_threshold( ): read_baseline_file.return_value = "1000" - _perf_get_throughput.return_value = 504 + _perf_get_throughput.return_value = ("504", "a") with tempfile.TemporaryDirectory() as tempdir: case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs) @@ -405,7 +380,7 @@ def test_perf_compare_throughput_baseline( ): read_baseline_file.return_value = "500" - _perf_get_throughput.return_value = 504 + _perf_get_throughput.return_value = ("504", "a") with tempfile.TemporaryDirectory() as tempdir: case, _, _, baseline_root = create_mock_case(tempdir, get_latest_cpl_logs) diff --git a/CIME/tests/test_unit_system_tests.py b/CIME/tests/test_unit_system_tests.py index 22825fd5559..1c05bed45be 100644 --- a/CIME/tests/test_unit_system_tests.py +++ b/CIME/tests/test_unit_system_tests.py @@ -70,12 +70,12 @@ def create_mock_case(tempdir, idx=None, cpllog_data=None): class TestUnitSystemTests(unittest.TestCase): @mock.patch("CIME.SystemTests.system_tests_common.load_coupler_customization") @mock.patch("CIME.SystemTests.system_tests_common.append_testlog") - @mock.patch("CIME.SystemTests.system_tests_common._perf_get_memory") + @mock.patch("CIME.SystemTests.system_tests_common.perf_get_memory_list") @mock.patch("CIME.SystemTests.system_tests_common.get_latest_cpl_logs") def test_check_for_memleak_runtime_error( self, get_latest_cpl_logs, - _perf_get_memory, + perf_get_memory_list, append_testlog, load_coupler_customization, ): @@ -83,7 +83,7 @@ def test_check_for_memleak_runtime_error( AttributeError ) - _perf_get_memory.side_effect = RuntimeError + perf_get_memory_list.side_effect = RuntimeError with tempfile.TemporaryDirectory() as tempdir: caseroot = Path(tempdir) / "caseroot" @@ -120,12 +120,12 @@ def test_check_for_memleak_runtime_error( @mock.patch("CIME.SystemTests.system_tests_common.load_coupler_customization") @mock.patch("CIME.SystemTests.system_tests_common.append_testlog") - @mock.patch("CIME.SystemTests.system_tests_common._perf_get_memory") + @mock.patch("CIME.SystemTests.system_tests_common.perf_get_memory_list") @mock.patch("CIME.SystemTests.system_tests_common.get_latest_cpl_logs") def test_check_for_memleak_not_enough_samples( self, get_latest_cpl_logs, - _perf_get_memory, + perf_get_memory_list, append_testlog, load_coupler_customization, ): @@ -133,7 +133,7 @@ def test_check_for_memleak_not_enough_samples( AttributeError ) - _perf_get_memory.return_value = [ + perf_get_memory_list.return_value = [ (1, 1000.0), (2, 0), ] @@ -173,12 +173,12 @@ def test_check_for_memleak_not_enough_samples( @mock.patch("CIME.SystemTests.system_tests_common.load_coupler_customization") @mock.patch("CIME.SystemTests.system_tests_common.append_testlog") - @mock.patch("CIME.SystemTests.system_tests_common._perf_get_memory") + @mock.patch("CIME.SystemTests.system_tests_common.perf_get_memory_list") @mock.patch("CIME.SystemTests.system_tests_common.get_latest_cpl_logs") def test_check_for_memleak_found( self, get_latest_cpl_logs, - _perf_get_memory, + perf_get_memory_list, append_testlog, load_coupler_customization, ): @@ -186,7 
             AttributeError
         )
 
-        _perf_get_memory.return_value = [
+        perf_get_memory_list.return_value = [
             (1, 1000.0),
             (2, 2000.0),
             (3, 3000.0),
@@ -230,12 +230,12 @@ def test_check_for_memleak_found(
 
     @mock.patch("CIME.SystemTests.system_tests_common.load_coupler_customization")
     @mock.patch("CIME.SystemTests.system_tests_common.append_testlog")
-    @mock.patch("CIME.SystemTests.system_tests_common._perf_get_memory")
+    @mock.patch("CIME.SystemTests.system_tests_common.perf_get_memory_list")
     @mock.patch("CIME.SystemTests.system_tests_common.get_latest_cpl_logs")
     def test_check_for_memleak(
         self,
         get_latest_cpl_logs,
-        _perf_get_memory,
+        perf_get_memory_list,
         append_testlog,
         load_coupler_customization,
     ):
@@ -243,7 +243,7 @@ def test_check_for_memleak(
             AttributeError
         )
 
-        _perf_get_memory.return_value = [
+        perf_get_memory_list.return_value = [
             (1, 3040.0),
             (2, 3002.0),
             (3, 3030.0),
diff --git a/doc/source/users_guide/testing.rst b/doc/source/users_guide/testing.rst
index 40868d2bbdd..061c62e3152 100644
--- a/doc/source/users_guide/testing.rst
+++ b/doc/source/users_guide/testing.rst
@@ -441,10 +441,12 @@ The following pseudo code is an example of this customization.::
         -------
         str
             Storing throughput value.
+        str
+            Open baseline file for writing.
         """
         current = analyze_throughput(...)
 
-        return json.dumps(current)
+        return json.dumps(current), "w"
 
     def perf_get_memory(case):
         """
@@ -457,10 +459,12 @@ The following pseudo code is an example of this customization.::
         -------
         str
             Storing memory value.
+        str
+            Open baseline file for writing.
         """
         current = analyze_memory(case)
 
-        return json.dumps(current)
+        return json.dumps(current), "w"
 
     def perf_compare_throughput_baseline(case, baseline, tolerance):
         """
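
Note (not part of the patch): a minimal sketch of the round trip made by the new default baseline format, using the _parse_baseline helper added above. The hashes and dates below reuse the made-up fixtures from the unit tests; only the behaviour shown by the asserts is what the patch itself establishes.

# Illustrative only -- exercises the helper added in CIME/baselines/performance.py.
from CIME.baselines.performance import _parse_baseline

# With the default hooks (mode "a"), each bless appends one
# "sha:<commit_hash> date:<timestamp> <value>" line built by _format_baseline(),
# so after two blesses read_baseline_file() returns both lines joined by newlines.
baseline_text = "sha:1df0 date:2023 1000.0\nsha:3b05 date:2023 2000.0"

# _parse_baseline() keeps only the newest entry's value, so the default
# comparison code ends up comparing against 2000.0 (as a string).
assert _parse_baseline(baseline_text) == "2000.0"

# A baseline with no entries parses to None, which the comparison functions
# report as "baseline had no value".
assert _parse_baseline("") is None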