[CI] Verbose flag check (#725)
It'd actually be nice to have a line printed for each test that runs.
Next step: set up a few levels of verbosity (see the sketch after this list):

0: nothing
1: a line for each test run
2: default, sensible
3: debug 
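
A rough sketch of how such levels could gate output in the runner. The `vprint` helper is illustrative only (not part of this commit); `config.verbose` is the field the diff below already checks.

```
from types import SimpleNamespace

def vprint(config, level, *args):
    # Emit the message only if the configured verbosity is at least `level`.
    if config.verbose >= level:
        print(*args)

config = SimpleNamespace(verbose=1)                  # as if parsed from a single -v
vprint(config, 1, "Running matmul_bias test")        # shown at level 1 and above
vprint(config, 3, "debug: dumping generated inputs") # shown only at level 3
```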

Need to understand what 'count' does in
```
 parser.add_argument("-v", "--verbose", action="count", default=0) 
```
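For reference, argparse's `action="count"` stores how many times the flag occurs on the command line: no flag leaves the `default=0`, `-v` gives 1, `-vv` gives 2, and so on. A minimal standalone check (not taken from run_test.py):

```
import argparse

parser = argparse.ArgumentParser()
# Each occurrence of -v/--verbose adds 1; when the flag is absent, default=0 is used.
parser.add_argument("-v", "--verbose", action="count", default=0)

for argv in ([], ["-v"], ["-vv"], ["-v", "-v", "-v"]):
    print(argv, "->", parser.parse_args(argv).verbose)
# prints: [] -> 0, ['-v'] -> 1, ['-vv'] -> 2, ['-v', '-v', '-v'] -> 3
```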
newling authored Sep 6, 2024
1 parent b6a3ba8 commit 50034c7
Showing 2 changed files with 60 additions and 18 deletions.
17 changes: 11 additions & 6 deletions .github/workflows/ci-linux.yml
@@ -175,15 +175,20 @@ jobs:
  $PWD/llvm-aie \
  --xrt-dir /opt/xilinx/xrt \
  --test-set='Smoke' \
- --do-not-run-aie
+ --do-not-run-aie > smoke_output.log 2>&1
- # Assert that output.log is empty (because verbose=0)
- if [ -s output.log ]; then
-   echo "output.log is not empty:"
-   cat output.log
+ # Assert that smoke_output.log is empty (because verbose=0)
+ # First assert that smoke_output.log exists:
+ if [ ! -f smoke_output.log ]; then
+   echo "smoke_output.log does not exist."
+   exit 1
+ fi
+ if [ -s smoke_output.log ]; then
+   echo "smoke_output.log is not empty:"
+   cat smoke_output.log
    exit 1
  else
-   echo "output.log is empty"
+   echo "smoke_output.log is empty"
  fi
  - name : E2E comparison of AIE to llvm-cpu
61 changes: 49 additions & 12 deletions build_tools/ci/cpu_comparison/run_test.py
@@ -522,7 +522,8 @@ def aie_vs_llvm_cpu(
      return

  name = name_from_mlir_filename(test_file)
- print(f"Running {name} test")
+ if config.verbose:
+     print(f"Running {name} test")

  input_args = generate_inputs(test_file, config.output_dir, seed)

@@ -627,7 +628,14 @@ def run(self, config):
  test_name = output_dir / "test_from_template_full_bias.mlir"
  template_name = matmul_template_dir / "matmul_bias_MxK_KxN_MxN.mlir"
  generate_matmul_test(test_name, template_name, 128, 128, 256, "i32", "i32")
- aie_vs_llvm_cpu(config, test_name, tile_pipeline="pack-peel", lower_to_aie_pipeline="air", rtol=0, atol=0)
+ aie_vs_llvm_cpu(
+     config,
+     test_name,
+     tile_pipeline="pack-peel",
+     lower_to_aie_pipeline="air",
+     rtol=0,
+     atol=0,
+ )

if config.xdna_datetime and config.xdna_datetime < 20240801:
for name in [
@@ -652,30 +660,46 @@ def run(self, config):
  # Test(s) of the form matmul(A,B) + C where A:MxK, B:KxN, C:N
  test_name = output_dir / "test_from_template_bias_N.mlir"
  template_name = matmul_template_dir / "matmul_bias_MxK_KxN_N.mlir"
- generate_matmul_test(
-     test_name, template_name, 1024, 1024, 512, "bf16", "f32"
- )
+ generate_matmul_test(test_name, template_name, 1024, 1024, 512, "bf16", "f32")
  if config.vitis_dir:
      aie_vs_llvm_cpu(
-         config, test_name, tile_pipeline="pack-peel", lower_to_aie_pipeline="air", use_ukernel=True
+         config,
+         test_name,
+         tile_pipeline="pack-peel",
+         lower_to_aie_pipeline="air",
+         use_ukernel=True,
      )
  aie_vs_llvm_cpu(
-     config, test_name, tile_pipeline="pack-peel", lower_to_aie_pipeline="air", use_ukernel=False
+     config,
+     test_name,
+     tile_pipeline="pack-peel",
+     lower_to_aie_pipeline="air",
+     use_ukernel=False,
  )

  # Test(s) of the form batch_matmul(A,B) where A:BxMxK, B:BxKxN
  template_name = matmul_template_dir / "batch_matmul_BxMxK_BxKxN.mlir"
- for (lhs_type, acc_type) in zip(["i32", "bf16"], ["i32", "f32"]):
-     test_name = output_dir / f"test_from_template_bmm_1_{lhs_type}_{acc_type}.mlir"
-     generate_matmul_test(test_name, template_name, 128, 128, 256, lhs_type, acc_type, b=1)
-     aie_vs_llvm_cpu(config, test_name, tile_pipeline="pack-peel", lower_to_aie_pipeline="objectFifo")
+ for lhs_type, acc_type in zip(["i32", "bf16"], ["i32", "f32"]):
+     test_name = (
+         output_dir / f"test_from_template_bmm_1_{lhs_type}_{acc_type}.mlir"
+     )
+     generate_matmul_test(
+         test_name, template_name, 128, 128, 256, lhs_type, acc_type, b=1
+     )
+     aie_vs_llvm_cpu(
+         config,
+         test_name,
+         tile_pipeline="pack-peel",
+         lower_to_aie_pipeline="objectFifo",
+     )

# TODO (vivian): The below tests are batch matmul with batch size equals 2, and have different
# numerics compared to CPU results. Comment these out until we have a fix.
# test_name = output_dir / f"test_from_template_bmm_2_{lhs_type}_{acc_type}.mlir"
# generate_matmul_test(test_name, template_name, 64, 64, 64, lhs_type, acc_type, b=2)
# aie_vs_llvm_cpu(config, test_name, tile_pipeline="pack-peel", lower_to_aie_pipeline="objectFifo")


class SmokeSet(TestSet):
def __init__(self):
super().__init__("Smoke")
@@ -837,7 +861,20 @@ def all_tests(
      ),
  )

- parser.add_argument("-v", "--verbose", action="count", default=0)
+ parser.add_argument(
+     "-v",
+     "--verbose",
+     action="count",
+     default=0,
+     help=dedent(
+         """
+         Verbosity level. Currently
+         0: total silence.
+         1 (-v) : almost everything.
+         2 (-vv) : everything.
+         """
+     ),
+ )

  parser.add_argument(
      "--reset-npu-between-runs",
