Skip to content

Commit

Permalink
[ci] use local requirements for test workflow (#2569)
Browse files Browse the repository at this point in the history
* update

* update
  • Loading branch information
zhulinJulia24 authored Oct 12, 2024
1 parent 1d442df commit 9b52f8d
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 32 deletions.
8 changes: 1 addition & 7 deletions .github/workflows/benchmark.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,19 +23,13 @@ on:
      description: 'Whether to start in offline mode; if true, you should prepare the code and whl package by yourself'
type: boolean
default: false
dependency_pkgs:
required: true
description: 'Dependency packages, you can also set a specific version'
type: string
default: 'packaging transformers_stream_generator transformers datasets matplotlib jmespath'

env:
HOST_PIP_CACHE_DIR: /nvme/github-actions/pip-cache
HOST_LOCALTIME: /usr/share/zoneinfo/Asia/Shanghai
OUTPUT_FOLDER: cuda11.8_dist_${{ github.run_id }}
REPORT_DIR: /nvme/qa_test_models/benchmark-reports/${{ github.run_id }}
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
dependency_pkgs: ${{inputs.dependency_pkgs || 'packaging transformers_stream_generator transformers datasets matplotlib jmespath'}}
FAIL_CONFIG: ${{ github.run_attempt != 1 && '--lf --lfnf none' || '--lf'}}

jobs:
Expand Down Expand Up @@ -115,7 +109,7 @@ jobs:
python3 -m pip install -e /root/packages/AutoAWQ_kernels
python3 -m pip install /root/packages/autoawq-0.2.6-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install /root/packages/xformers-0.0.27+cu118-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down
16 changes: 5 additions & 11 deletions .github/workflows/daily_ete_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,11 +28,6 @@ on:
      description: 'Whether to start in offline mode; if true, you should prepare the code and whl package by yourself'
type: boolean
default: false
dependency_pkgs:
required: true
description: 'Dependency packages, you can also set a specific version'
type: string
default: 'packaging transformers_stream_generator transformers datasets matplotlib openai attrdict timm modelscope jmespath decord auto_gptq qwen_vl_utils mmengine-lite==0.10.5'
regression_func:
required: true
description: 'regression functions'
Expand All @@ -43,7 +38,6 @@ on:

env:
HOST_PIP_CACHE_DIR: /nvme/github-actions/pip-cache
dependency_pkgs: ${{inputs.dependency_pkgs || 'packaging transformers_stream_generator transformers datasets matplotlib openai attrdict timm modelscope jmespath decord auto_gptq qwen_vl_utils mmengine-lite==0.10.5'}}
HOST_LOCALTIME: /usr/share/zoneinfo/Asia/Shanghai
OUTPUT_FOLDER: cuda11.8_dist_${{ github.run_id }}
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
Expand Down Expand Up @@ -132,7 +126,7 @@ jobs:
python3 -m pip install -e /root/packages/AutoAWQ_kernels
python3 -m pip install /root/packages/autoawq-0.2.6-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install /root/packages/xformers-0.0.27+cu118-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down Expand Up @@ -306,7 +300,7 @@ jobs:
# manually install flash attn
# the install package is from https://github.com/Dao-AILab/flash-attention/releases
python3 -m pip install /root/packages/flash_attn-2.6.3+cu118torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down Expand Up @@ -410,7 +404,7 @@ jobs:
# manually install flash attn
# the install package is from https://github.com/Dao-AILab/flash-attention/releases
python3 -m pip install /root/packages/flash_attn-2.6.3+cu118torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down Expand Up @@ -483,7 +477,7 @@ jobs:
# manually install flash attn
# the install package is from https://github.com/Dao-AILab/flash-attention/releases
python3 -m pip install /root/packages/flash_attn-2.6.3+cu118torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down Expand Up @@ -559,7 +553,7 @@ jobs:
# the install package is from https://github.com/Dao-AILab/flash-attention/releases
python3 -m pip install /root/packages/flash_attn-2.6.3+cu118torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
python3 -m pip install /root/packages/xformers-0.0.27+cu118-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down
8 changes: 1 addition & 7 deletions .github/workflows/evaluate.yml
Original file line number Diff line number Diff line change
Expand Up @@ -43,15 +43,9 @@ on:
      description: 'Whether to start in offline mode; if true, you should prepare the code and whl package by yourself'
type: boolean
default: false
dependency_pkgs:
required: true
description: 'Dependency packages, you can also set a specific version'
type: string
default: 'pynvml packaging protobuf transformers_stream_generator transformers human_eval mmengine-lite==0.10.5'

env:
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
dependency_pkgs: ${{inputs.dependency_pkgs || 'pynvml packaging protobuf transformers_stream_generator transformers human_eval mmengine-lite==0.10.5'}}

jobs:
linux-build:
Expand Down Expand Up @@ -136,7 +130,7 @@ jobs:
# the install package is from https://github.com/Dao-AILab/flash-attention/releases
python3 -m pip install /root/packages/flash_attn-2.6.3+cu118torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
python3 -m pip install /root/packages/xformers-0.0.27+cu118-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /root/models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down
8 changes: 1 addition & 7 deletions .github/workflows/stable.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,11 +18,6 @@ on:
      description: 'Whether to start in offline mode; if true, you should prepare the code and whl package by yourself'
type: boolean
default: false
dependency_pkgs:
required: true
description: 'Dependency packages, you can also set a specific version'
type: string
default: 'packaging transformers_stream_generator transformers datasets matplotlib jmespath mmengine-lite==0.10.5'
schedule:
- cron: '00 8 * * 1'

Expand All @@ -32,7 +27,6 @@ env:
OUTPUT_FOLDER: cuda11.8_dist_${{ github.run_id }}
REPORT_DIR: /nvme/qa_test_models/stable_reports/${{ github.run_id }}
ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: true
dependency_pkgs: ${{inputs.dependency_pkgs || 'packaging transformers_stream_generator transformers datasets matplotlib jmespath mmengine-lite==0.10.5'}}
COMPASS_DATA_CACHE: /nvme/qa_test_models/dataset

jobs:
Expand Down Expand Up @@ -110,7 +104,7 @@ jobs:
# the install package is from https://github.com/Dao-AILab/flash-attention/releases
python3 -m pip install /root/packages/flash_attn-2.6.3+cu118torch2.3cxx11abiFALSE-cp310-cp310-linux_x86_64.whl
python3 -m pip install /root/packages/xformers-0.0.27+cu118-cp310-cp310-manylinux2014_x86_64.whl --no-deps
python3 -m pip install ${{env.dependency_pkgs}}
python3 -m pip install -r /nvme/qa_test_models/offline_pkg/requirements.txt
- name: Install lmdeploy
if: ${{github.event_name == 'schedule' || !inputs.offline_mode}}
run: |
Expand Down

0 comments on commit 9b52f8d

Please sign in to comment.