forked from vllm-project/vllm
Merge branch 'develop' into triton_bugfix
Showing 10 changed files with 810 additions and 575 deletions.
@@ -0,0 +1,33 @@
#!/bin/bash

export HIP_VISIBLE_DEVICES=0,1,2,3,4,5,6,7


## ---- Mixtral fp8 tuning example ---- ##
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-Instruct-v0.1-FP8/ --tp-size 1 --tune --dtype fp8_w8a8
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-Instruct-v0.1-FP8/ --tp-size 2 --tune --dtype fp8_w8a8
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-Instruct-v0.1-FP8/ --tp-size 4 --tune --dtype fp8_w8a8
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-Instruct-v0.1-FP8/ --tp-size 8 --tune --dtype fp8_w8a8


## ---- Mixtral fp16 tuning example ---- ##
# No need to pass --dtype fp16; it is the default for ROCm in this script.

python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-v0.1/ --tp-size 1 --tune
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-v0.1/ --tp-size 2 --tune
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-v0.1/ --tp-size 4 --tune
python benchmark_moe.py --model /data/models/mistral-ai-models/Mixtral-8x22B-v0.1/ --tp-size 8 --tune


## ---- After the tuning is finished ---- ##
# The tuning script saves the configurations in a JSON file in the directory from which you launch the script.
# The name of the JSON file will look something like this: E=8,N=14336,device_name=AMD_Instinct_MI300X.json
#
# [IMPORTANT] -> Once the tuning is complete, move the tuned config file(s) to the following path:
# vllm/vllm/model_executor/layers/fused_moe/configs/


## ---- Notes ---- ##
# 1. A tuned file is specific to one TP size: a file tuned with --tp-size 8 can only be used when running the model with TP=8.
# 2. The script uses Ray for multi-GPU tuning. Export HIP_VISIBLE_DEVICES accordingly to expose the required number of GPUs.
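
The comments above define the tuned-file naming convention that the config added below follows. As a rough Python sketch of how such a file could be named and loaded (tuned_config_filename and load_tuned_configs are hypothetical helper names for illustration, not vLLM's actual API; only the E=...,N=...,device_name=...[,dtype=...].json pattern comes from the script's comments):

import json
import os
from typing import Optional

def tuned_config_filename(E: int, N: int, device_name: str,
                          dtype: Optional[str] = None) -> str:
    # Pattern from the script's comments, e.g.
    #   E=8,N=14336,device_name=AMD_Instinct_MI300X.json                 (fp16)
    #   E=8,N=16384,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8.json (fp8)
    dtype_part = f",dtype={dtype}" if dtype else ""
    return f"E={E},N={N},device_name={device_name}{dtype_part}.json"

def load_tuned_configs(config_dir: str, E: int, N: int, device_name: str,
                       dtype: Optional[str] = None) -> Optional[dict]:
    # Returns {num_tokens: kernel_config} from the tuned JSON, or None
    # if no tuned file exists for this (E, N, device, dtype) combination.
    path = os.path.join(config_dir,
                        tuned_config_filename(E, N, device_name, dtype))
    if not os.path.exists(path):
        return None
    with open(path) as f:
        return {int(m): cfg for m, cfg in json.load(f).items()}

For the fp8 config added in this commit, the lookup would be load_tuned_configs("vllm/model_executor/layers/fused_moe/configs", E=8, N=16384, device_name="AMD_Instinct_MI300X", dtype="fp8_w8a8").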
164 changes: 164 additions & 0 deletions
.../layers/fused_moe/configs/E=8,N=16384,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8.json
@@ -0,0 +1,164 @@
{
    "1": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 64,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "2": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 16,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "4": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 32,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 2,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "8": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 64,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "16": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 64,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "24": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 64,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "32": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 64,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 4,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "48": {
        "BLOCK_SIZE_M": 16,
        "BLOCK_SIZE_N": 64,
        "BLOCK_SIZE_K": 256,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "64": {
        "BLOCK_SIZE_M": 32,
        "BLOCK_SIZE_N": 128,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 4,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "96": {
        "BLOCK_SIZE_M": 32,
        "BLOCK_SIZE_N": 128,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 4,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "128": {
        "BLOCK_SIZE_M": 64,
        "BLOCK_SIZE_N": 128,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 4,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "256": {
        "BLOCK_SIZE_M": 64,
        "BLOCK_SIZE_N": 128,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "512": {
        "BLOCK_SIZE_M": 64,
        "BLOCK_SIZE_N": 128,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "1024": {
        "BLOCK_SIZE_M": 128,
        "BLOCK_SIZE_N": 256,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "1536": {
        "BLOCK_SIZE_M": 128,
        "BLOCK_SIZE_N": 256,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "2048": {
        "BLOCK_SIZE_M": 128,
        "BLOCK_SIZE_N": 256,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "3072": {
        "BLOCK_SIZE_M": 128,
        "BLOCK_SIZE_N": 256,
        "BLOCK_SIZE_K": 128,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    },
    "4096": {
        "BLOCK_SIZE_M": 256,
        "BLOCK_SIZE_N": 256,
        "BLOCK_SIZE_K": 64,
        "GROUP_SIZE_M": 1,
        "num_warps": 8,
        "num_stages": 0,
        "waves_per_eu": 0
    }
}
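
The keys of this JSON are token counts (the M dimension of the fused MoE matmul), and each value is the set of Triton launch parameters that won the tuning sweep for that batch size. Here is a minimal sketch of how a runtime could pick an entry for an arbitrary batch size, assuming a nearest-key selection rule (the rule is an assumption for illustration; vLLM's actual selection logic lives in its fused_moe module):

import json

def select_kernel_config(config_path: str, num_tokens: int) -> dict:
    # Keys of the tuned JSON are the token counts the sweep covered
    # ("1", "2", ..., "4096"); values are Triton kernel launch parameters.
    with open(config_path) as f:
        configs = {int(m): cfg for m, cfg in json.load(f).items()}
    # Fall back to the entry tuned for the nearest token count.
    nearest = min(configs, key=lambda m: abs(m - num_tokens))
    return configs[nearest]

# Example: a batch of 200 tokens sits between the "128" and "256" entries
# and resolves to "256" (|256 - 200| = 56 < |200 - 128| = 72).
cfg = select_kernel_config(
    "E=8,N=16384,device_name=AMD_Instinct_MI300X,dtype=fp8_w8a8.json", 200)
print(cfg["BLOCK_SIZE_M"], cfg["num_warps"])  # 64 8 for this file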