forked from vllm-project/vllm
-
Notifications
You must be signed in to change notification settings. Forks: 29
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Creating ROCm whl upon release (#259)
* Creating ROCm whl upon release
- Loading branch information
Showing 2 changed files with 35 additions and 49 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,23 +1,23 @@ | ||
#!/bin/bash
# Build release wheels for the ROCm fork of vLLM.
# Produces dist/*.whl for vllm itself and for the bundled gradlib package.
# Requires: a ROCm-enabled environment with python3 and pip on PATH.
set -eux

python_executable=python3

# Install requirements
$python_executable -m pip install -r requirements-rocm.txt

# Make sure release wheels are built for the following architectures
# (MI200-series gfx90a and MI300-series gfx942)
export PYTORCH_ROCM_ARCH="gfx90a;gfx942"

# sccache interferes with the release build; remove it if present.
# command -v is the portable replacement for `which` (ShellCheck SC2230);
# rm -f silently ignores a nonexistent/empty operand when sccache is absent.
rm -f "$(command -v sccache || true)"

export MAX_JOBS=32

# Build the main vllm wheel, then the gradlib wheel
$python_executable setup.py bdist_wheel --dist-dir=dist
cd gradlib
$python_executable setup.py bdist_wheel --dist-dir=dist
cd ..