From 1759f22c1381fb75f002800445b012a1294a9700 Mon Sep 17 00:00:00 2001
From: Yan Wang
Date: Wed, 11 Dec 2024 12:20:22 +0100
Subject: [PATCH 1/2] Uses torch._inductor.compile instead of torch.compile in
 benchmark script to avoid segmentation by Dynamo (#1521)

---
 thunder/dynamo/utils.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/thunder/dynamo/utils.py b/thunder/dynamo/utils.py
index 8376cb2c7d..24d2dc7648 100644
--- a/thunder/dynamo/utils.py
+++ b/thunder/dynamo/utils.py
@@ -780,10 +780,19 @@ def reproducer(
     if use_pytest_benchmark:
         code_str += f"""import pytest

+# NOTE: The reproducer function has already been processed by TorchDynamo.
+# If we let it go through TorchDynamo again, it could be segmented further.
+# To avoid this, we directly use Inductor here.
+def torch_inductor(fn, inputs):
+    from torch._inductor import compile as inductor_compile
+    from torch.fx import symbolic_trace
+
+    fx_graph = symbolic_trace(fn)
+    return inductor_compile(fx_graph, inputs)
+
 bench_executors_dict = {{}}
 bench_executors_dict["thunder"]=partial(thunder.jit, {thunder_options_str})
-bench_executors_dict["torch.compile"]=torch.compile
-bench_executors_dict["dynamo_eager"]=partial(torch.compile, backend="eager")
+bench_executors_dict["torch_inductor"]=torch_inductor
 bench_executors_dict["eager"]=None
 """
         if has_cuda_args:
@@ -812,7 +821,12 @@ def reproducer(
         else:
             func_str = f"""{func_str}
 mod = DynamoModule()
-compiled = mod if executor == None else executor(mod)
+if executor == None:
+    compiled = mod
+elif executor == torch_inductor:
+    compiled = executor(mod, inputs)
+else:
+    compiled = executor(mod)
 """
     if not has_cuda_args:
         func_str += f"""benchmark(compiled, *inputs)"""

From f19a9ebac7060c6e5c75fcf617c081a109820a82 Mon Sep 17 00:00:00 2001
From: Yan Wang
Date: Wed, 11 Dec 2024 13:26:01 +0100
Subject: [PATCH 2/2] Add link to issue

---
 thunder/dynamo/utils.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/thunder/dynamo/utils.py b/thunder/dynamo/utils.py
index 24d2dc7648..5711b54af8 100644
--- a/thunder/dynamo/utils.py
+++ b/thunder/dynamo/utils.py
@@ -783,6 +783,7 @@ def reproducer(
 # NOTE: The reproducer function has already been processed by TorchDynamo.
 # If we let it go through TorchDynamo again, it could be segmented further.
 # To avoid this, we directly use Inductor here.
+# See issue https://github.com/Lightning-AI/lightning-thunder/issues/1521
 def torch_inductor(fn, inputs):
     from torch._inductor import compile as inductor_compile
     from torch.fx import symbolic_trace
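
A minimal, self-contained sketch of how the torch_inductor helper added by
this patch is used in a generated benchmark script. The DynamoModule body and
the example inputs below are illustrative placeholders, not taken from the
patch; only torch_inductor itself mirrors the patched code.

import torch
from torch._inductor import compile as inductor_compile
from torch.fx import symbolic_trace


def torch_inductor(fn, inputs):
    # symbolic_trace produces an FX GraphModule that Inductor can compile
    # directly, bypassing TorchDynamo so the graph is not segmented again.
    fx_graph = symbolic_trace(fn)
    return inductor_compile(fx_graph, inputs)


# Hypothetical stand-in for the FX module the reproducer emits.
class DynamoModule(torch.nn.Module):
    def forward(self, x):
        return torch.nn.functional.relu(x) * 2


mod = DynamoModule()
inputs = [torch.randn(4, 4)]
compiled = torch_inductor(mod, inputs)
out = compiled(*inputs)  # the compiled callable takes the unpacked inputs

This matches the dispatch the patch adds to the generated script: the plain
executors are called as executor(mod), while torch_inductor also needs the
example inputs, hence the executor(mod, inputs) branch.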