forked from iree-org/iree
-
Notifications
You must be signed in to change notification settings - Fork 11
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
[Codegen][Tuner] Allow tuning specs in the LLVMGPU pipeline (iree-org…
…#19359) This adds the `materialize-tuning-specs` pass to the LLVMGPU executable configuration pipelines. Add a test that shows that the tuning spec gets applied and picked up in the ROCDL pipeline. Also, replace the print-based checks in existing tests with op remarks on transform strategy application in `materialize-user-configs`.
- Loading branch information
Showing 13 changed files with 112 additions and 8 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
48 changes: 48 additions & 0 deletions — compiler/plugins/target/ROCM/test/lowering_strategy_from_tuning_spec.mlir
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,48 @@ | ||
// RUN: iree-opt --split-input-file --iree-gpu-test-target=gfx942 \
// RUN:   --pass-pipeline="builtin.module(hal.executable(hal.executable.variant(iree-hal-configure-target-executable-variants{target=rocm})))" \
// RUN:   --iree-codegen-tuning-spec-path=%p/tuning_spec_mmt_tile_and_fuse.mlir \
// RUN:   --iree-codegen-notify-transform-strategy-application \
// RUN:   --verify-diagnostics %s | FileCheck %s

// Make sure we can apply the lowering strategy from the specified tuning spec.
// The spec annotates the matched op with `__tuning_spec_applied__`, so seeing
// that attribute (together with the lowering config) proves the config came
// from the spec and not from the compiler heuristics.

// CHECK:      #translation = #iree_codegen.translation_info<pipeline = LLVMGPUTileAndFuse workgroup_size = [128, 1, 1] subgroup_size = 64>
// CHECK:      func.func @matmul_transpose_b
// CHECK-SAME:   translation_info = #translation
// CHECK:      linalg.generic
// CHECK-SAME:     __tuning_spec_applied__
// CHECK-SAME:     lowering_config = #iree_gpu.lowering_config<

#pipeline_layout = #hal.pipeline.layout<bindings = [
  #hal.pipeline.binding<storage_buffer>,
  #hal.pipeline.binding<storage_buffer>,
  #hal.pipeline.binding<storage_buffer>
]>
hal.executable public @main {
  hal.executable.variant public @rocm_hsaco_fb target(<"rocm", "rocm-hsaco-fb">) {
    hal.executable.export public @matmul_transpose_b ordinal(0) layout(#pipeline_layout) {
    ^bb0(%arg0: !hal.device):
      %x, %y, %z = flow.dispatch.workgroup_count_from_slice
      hal.return %x, %y, %z : index, index, index
    }
    builtin.module {
      // expected-remark@+1 {{Applied transform configuration strategy @iree_linked_tuning_spec::@__kernel_config}}
      func.func @matmul_transpose_b() {
        %cst = arith.constant 0.000000e+00 : f16
        %c0 = arith.constant 0 : index
        %0 = hal.interface.binding.subspan layout(#pipeline_layout) binding(0) alignment(64) offset(%c0) flags(ReadOnly) : !flow.dispatch.tensor<readonly:tensor<2048x1280xf16>>
        %1 = hal.interface.binding.subspan layout(#pipeline_layout) binding(1) alignment(64) offset(%c0) flags(ReadOnly) : !flow.dispatch.tensor<readonly:tensor<10240x1280xf16>>
        %2 = hal.interface.binding.subspan layout(#pipeline_layout) binding(2) alignment(64) offset(%c0) : !flow.dispatch.tensor<writeonly:tensor<2048x10240xf32>>
        %3 = flow.dispatch.tensor.load %0, offsets = [0, 0], sizes = [2048, 1280], strides = [1, 1] : !flow.dispatch.tensor<readonly:tensor<2048x1280xf16>> -> tensor<2048x1280xf16>
        %4 = flow.dispatch.tensor.load %1, offsets = [0, 0], sizes = [10240, 1280], strides = [1, 1] : !flow.dispatch.tensor<readonly:tensor<10240x1280xf16>> -> tensor<10240x1280xf16>
        %5 = tensor.empty() : tensor<2048x10240xf32>
        %6 = linalg.fill ins(%cst : f16) outs(%5 : tensor<2048x10240xf32>) -> tensor<2048x10240xf32>
        %7 = linalg.matmul_transpose_b
            ins(%3, %4 : tensor<2048x1280xf16>, tensor<10240x1280xf16>)
            outs(%6 : tensor<2048x10240xf32>) -> tensor<2048x10240xf32>
        flow.dispatch.tensor.store %7, %2, offsets = [0, 0], sizes = [2048, 10240], strides = [1, 1] : tensor<2048x10240xf32> -> !flow.dispatch.tensor<writeonly:tensor<2048x10240xf32>>
        return
      }
    }
  }
}
24 changes: 24 additions & 0 deletions — compiler/plugins/target/ROCM/test/tuning_spec_mmt_tile_and_fuse.mlir
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,24 @@ | ||
// RUN: iree-opt %s

// Tuning spec used by lowering_strategy_from_tuning_spec.mlir. The entrypoint
// sequence matches `linalg.generic` ops and attaches a fixed compilation info
// (LLVMGPUTileAndFuse pipeline with the tile sizes below).
module @mmt_tile_and_fuse_spec attributes { transform.with_named_sequence } {
  transform.named_sequence @main(%arg0: !transform.any_op {transform.readonly}) -> ()
    attributes { iree_codegen.tuning_spec_entrypoint } {
    %mmt = transform.structured.match ops{["linalg.generic"]} in %arg0 : (!transform.any_op) -> !transform.any_op
    %config = transform.param.constant #iree_codegen.compilation_info<
      lowering_config = #iree_gpu.lowering_config<{workgroup = [64, 64, 0],
                                                   reduction = [0, 0, 4],
                                                   thread = [8, 4],
                                                   promote_operands = [0, 1]}>,
      translation_info = #iree_codegen.translation_info<pipeline = LLVMGPUTileAndFuse
                                                        workgroup_size = [128, 1, 1] subgroup_size = 64>
    > -> !transform.any_param
    transform.annotate %mmt "compilation_info" = %config : !transform.any_op, !transform.any_param
    // Add a dummy unit attribute to be sure that the tuning spec applied.
    // Otherwise it would be difficult to tell if the lowering config attribute
    // comes from our tuning spec or if the compiler heuristic happened to produce
    // the same config as this script.
    transform.annotate %mmt "__tuning_spec_applied__" : !transform.any_op
    transform.yield
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters