diff --git a/nixos/tests/archiver-appliance/test_script.py b/nixos/tests/archiver-appliance/test_script.py
index e00dc0f3..5906f133 100644
--- a/nixos/tests/archiver-appliance/test_script.py
+++ b/nixos/tests/archiver-appliance/test_script.py
@@ -74,62 +74,70 @@ def caput(pv_name: str, value: str):
         ],
     )
 
-with subtest("wait until aiExample is being archived"):
-    retry(lambda _: check_pv_archived("aiExample"))
+with subtest("aiExample"):
+    with subtest("wait until aiExample is being archived"):
+        retry(lambda _: check_pv_archived("aiExample"))
 
-with subtest("wait for a few points"):
-    time.sleep(10)
+    with subtest("wait for a few points"):
+        time.sleep(10)
 
-with subtest("json of aiExample are valid"):
-    data = get_data("aiExample")
+    with subtest("json of aiExample is valid"):
+        data = get_data("aiExample")
 
-    def alarm(value: float):
-        if value <= 2 or value >= 8:
-            return 2
-        elif value <= 4 or value >= 6:
-            return 1
-        else:
-            return 0
+        def alarm(value: float):
+            if value <= 2 or value >= 8:
+                return 2
+            elif value <= 4 or value >= 6:
+                return 1
+            else:
+                return 0
 
-    previous_val = data[0]["val"]
+        previous_val = data[0]["val"]
 
-    # Validate some of the data
-    for i in range(1, 6):
-        value: float = data[i]["val"]
+        # Validate some of the data
+        for i in range(1, 6):
+            value: float = data[i]["val"]
 
-        expexted_val: float
-        if previous_val == 9:
-            expexted_val = 0
-        else:
-            expexted_val = previous_val + 1
+            expected_val: float
+            if previous_val == 9:
+                expected_val = 0
+            else:
+                expected_val = previous_val + 1
 
-        assert value == expexted_val, "inconsistent archiving of aiExample"
-        assert data[i]["severity"] == alarm(
-            value
-        ), "incoherent severity of the aiExample alarm"
+            assert value == expected_val, "inconsistent archiving of aiExample"
+            assert data[i]["severity"] == alarm(
+                value
+            ), "incoherent severity of the aiExample alarm"
 
-        previous_val = value
+            previous_val = value
 
-with subtest("csv of aiExample is valid"):
-    csv_content = server.succeed(
-        "curl -sSf " "'http://localhost:8080/retrieval/data/getData.csv?pv=aiExample'"
-    )
-
-    csv_lines = csv_content.split("\n")
-
-    # Validate the first 5 lines
-    for i in range(5):
-        cols = csv_lines[i].split(",")
+    with subtest("csv of aiExample is valid"):
+        csv_content = server.succeed(
+            "curl -sSf "
+            "'http://localhost:8080/retrieval/data/getData.csv?pv=aiExample'"
+        )
 
-        assert int(cols[0]) == data[i]["secs"], "secs CSV value incoherent with JSON"
-        assert float(cols[1]) == data[i]["val"], "val CSV value incoherent with JSON"
-        assert (
-            int(cols[2]) == data[i]["severity"]
-        ), "severity CSV value incoherent with JSON"
-        assert (
-            int(cols[3]) == data[i]["status"]
-        ), "status CSV value incoherent with JSON"
-        assert int(cols[4]) == data[i]["nanos"], "nanos CSV value incoherent with JSON"
+        csv_lines = csv_content.split("\n")
+
+        # Validate the first 5 lines
+        for i in range(5):
+            cols = csv_lines[i].split(",")
+
+            assert (
+                int(cols[0]) == data[i]["secs"]
+            ), "secs CSV value incoherent with JSON"
+            assert (
+                float(cols[1]) == data[i]["val"]
+            ), "val CSV value incoherent with JSON"
+            assert (
+                int(cols[2]) == data[i]["severity"]
+            ), "severity CSV value incoherent with JSON"
+            assert (
+                int(cols[3]) == data[i]["status"]
+            ), "status CSV value incoherent with JSON"
+            assert (
+                int(cols[4]) == data[i]["nanos"]
+            ), "nanos CSV value incoherent with JSON"
 
 with subtest("static records"):
     with subtest("wait until static is being archived"):
@@ -191,8 +199,58 @@ def static_deadband_has_more_data(_):
 
     retry(static_deadband_has_more_data)
     assert len(data) == 2, "staticDeadband should have two datapoints"
-    assert data[1]["val"] == 10, "staticDeadband's additional datapoint should be 10"
+    assert (
+        data[1]["val"] == 10
+    ), "staticDeadband's additional datapoint should be 10"
+
+with subtest("static processed record"):
+    with subtest("wait until staticProcessed is being archived"):
+        retry(lambda _: check_pv_archived("staticProcessed"))
+
+    with subtest("json of staticProcessed is valid"):
+        data = get_data("staticProcessed")
+
+        previous_secs = data[0]["secs"]
+        delay_sum = 0
+
+        for i in range(1, 6):
+            assert data[i]["val"] == 0, "value of staticProcessed should not change"
+            delay_sum += data[i]["secs"] - previous_secs
+            previous_secs = data[i]["secs"]
+
+        mean_delay = delay_sum / 5
+
+        assert (
+            round(mean_delay) == 1
+        ), "staticProcessed should be processed every second"
+
+with subtest("waveform record"):
+    with subtest("wait until staticDeadband is being archived"):
+        retry(lambda _: check_pv_archived("staticDeadband"))
+
+    caput("waveform", "1,2,3,4,5")
+
+    with subtest("json of waveform is valid"):
+
+        def waveform_has_data(_):
+            global data
+            data = get_data("waveform")
+            return len(data) > 0
+
+        retry(waveform_has_data)
+        assert len(data) == 1, "waveform should have one datapoint"
+        print(data)
+        assert data[0]["val"] == ["1,2,3,4,5"], "waveform datapoint is incorrect"
+
+with subtest("non existing record"):
+    pv_status = get("/mgmt/bpl/getPVStatus?pv=nonExisting")
+    assert pv_status != "Being archived", "nonExisting record shouldn't be archived"
+
+    never_connected_pvs = get("/mgmt/bpl/getNeverConnectedPVs")
+
+    assert len(never_connected_pvs) == 1, "only 1 PV should never have been connected"
+    assert never_connected_pvs[0]["pvName"] == "nonExisting", "wrong PV never connected"
 
-# TODO: check "nonExisting"
 # TODO: check archiving with manual sampling period
 # TODO: check /arch, check consolidation into mts, sts
+# TODO: check pause/play