diff --git a/examples/BuddyLlama/CMakeLists.txt b/examples/BuddyLlama/CMakeLists.txt
index 8613dc1170..2fb9f33625 100644
--- a/examples/BuddyLlama/CMakeLists.txt
+++ b/examples/BuddyLlama/CMakeLists.txt
@@ -13,6 +13,8 @@ add_custom_command(
             -eliminate-empty-tensors
             -empty-tensor-to-alloc-tensor
             -linalg-bufferize
+            -matmul-paralell-vectorization-optimize
+            -batchmatmul-optimize
             -convert-linalg-to-affine-loops
             -affine-loop-fusion
             -affine-parallelize
@@ -61,4 +63,14 @@ SET_TARGET_PROPERTIES(
 
 add_executable(buddy-llama-run llama-main.cpp)
 target_link_directories(buddy-llama-run PRIVATE ${LLVM_MLIR_LIBRARY_DIR})
-target_link_libraries(buddy-llama-run LLAMA mlir_c_runner_utils omp)
+
+set(BUDDY_LLAMA_LIBS
+  LLAMA
+  mlir_c_runner_utils
+  omp
+)
+if(BUDDY_MLIR_USE_MIMALLOC)
+  list(APPEND BUDDY_LLAMA_LIBS mimalloc)
+endif()
+
+target_link_libraries(buddy-llama-run ${BUDDY_LLAMA_LIBS})
diff --git a/examples/BuddyLlama/README.md b/examples/BuddyLlama/README.md
index 36b499a81e..7fce5a9e67 100644
--- a/examples/BuddyLlama/README.md
+++ b/examples/BuddyLlama/README.md
@@ -77,3 +77,6 @@ $ cd bin
 $ ./buddy-llama-run
 ```
 This build will spend a few minutes. We recommend you to use better cpu such as server-level cpu to run buddy-llama-run.
+
+If you wish to utilize `mimalloc` as a memory allocator, you need to set `BUDDY_MLIR_USE_MIMALLOC` and `MIMALLOC_BUILD_DIR`.
+For more details, please see [here](../../thirdparty/README.md#the-mimalloc-allocator).
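
Usage sketch (not part of the patch): the snippet below shows one way the new options could be passed when configuring buddy-mlir and building this example. `BUDDY_MLIR_USE_MIMALLOC` and `MIMALLOC_BUILD_DIR` come from the diff and the linked thirdparty README; the directory paths are placeholders, and any cache variables your build already requires (LLVM/MLIR locations, etc.) still need to be supplied as usual.

```
# Hedged example only: enable the mimalloc path introduced by this change.
# /path/to/mimalloc/build is a placeholder for wherever you built mimalloc
# (see thirdparty/README.md); other required configure flags are omitted.
$ cd buddy-mlir/build
$ cmake -G Ninja .. \
    -DBUDDY_MLIR_USE_MIMALLOC=ON \
    -DMIMALLOC_BUILD_DIR=/path/to/mimalloc/build
$ ninja buddy-llama-run
```

When `BUDDY_MLIR_USE_MIMALLOC` is not set, the new `if()` branch in the CMakeLists is skipped and `buddy-llama-run` links against the same three libraries as before (`LLAMA`, `mlir_c_runner_utils`, `omp`), so the default build is unaffected.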