This repository has been archived by the owner on Oct 25, 2024. It is now read-only.

Fix optimum-intel version for INC v3.0 #1654

Merged
merged 1 commit on Jul 5, 2024
@@ -61,7 +61,7 @@ ENV COMPOSE_DOCKER_CLI_BUILD=0
 # Install torch and intel-extension-for-pytorch 2.1
 RUN python3 -m pip install torch torchvision torchaudio --index-url https://download.pytorch.org/whl/cpu
 RUN python3 -m pip install intel-extension-for-pytorch intel-extension-for-transformers optimum
-RUN python3 -m pip install git+https://github.com/huggingface/optimum-intel.git@f95dea1ae8966dee4d75d622e7b2468c514ba02d
+RUN python3 -m pip install git+https://github.com/huggingface/optimum-intel.git@50d867c13b22c22eda451ddb67bddb8159670f85
 RUN python3 -m pip install git+https://github.com/bigcode-project/bigcode-evaluation-harness@0d84db85f9ff971fa23a187a3347b7f59af288dc

 # Standard requirements
@@ -10,6 +10,6 @@ transformers >= 4.35.0
 tiktoken #code_gen
 neural-compressor
 intel_extension_for_pytorch==2.3.0
-optimum-intel
+git+https://github.com/huggingface/optimum-intel.git@50d867c13b22c22eda451ddb67bddb8159670f85
 auto-round==0.2
 git+https://github.com/bigcode-project/bigcode-evaluation-harness@094c7cc197d13a53c19303865e2056f1c7488ac1
@@ -7,7 +7,7 @@ sentencepiece != 0.1.92
 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/
 torch==2.1.0a0
 transformers
-optimum-intel
+git+https://github.com/huggingface/optimum-intel.git@50d867c13b22c22eda451ddb67bddb8159670f85
 bitsandbytes #baichuan
 transformers_stream_generator
 tiktoken #qwen
@@ -7,7 +7,7 @@ sentencepiece != 0.1.92
 torch==2.3.0+cpu
 transformers==4.38.1
 intel_extension_for_pytorch==2.3.0
-optimum-intel
+git+https://github.com/huggingface/optimum-intel.git@50d867c13b22c22eda451ddb67bddb8159670f85
 bitsandbytes #baichuan
 transformers_stream_generator
 tiktoken #qwen
tests/requirements.txt (2 changes: 1 addition & 1 deletion)
@@ -7,6 +7,7 @@ datasets==2.16.1
 einops
 evaluate
 gguf
+git+https://github.com/huggingface/optimum-intel.git@50d867c13b22c22eda451ddb67bddb8159670f85
 git+https://github.com/intel/neural-compressor.git
 git+https://github.com/intel/neural-speed.git
 intel-extension-for-pytorch==2.3.0
@@ -16,7 +17,6 @@ mlflow
 nlpaug==1.1.9
 onnx
 onnxruntime
-optimum-intel
 peft==0.6.2
 py-cpuinfo
 sacremoses
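After installing from the updated requirements, a quick sanity check can confirm that the git-pinned optimum-intel commit resolved alongside neural-compressor. This is a minimal sketch and not part of this PR; the distribution names are assumptions based on the requirements above.

# Minimal environment sanity check (assumed workflow, not part of this PR):
# print the installed versions of the two packages this PR is reconciling.
from importlib.metadata import version, PackageNotFoundError

for dist in ("optimum-intel", "neural-compressor"):
    try:
        print(dist, version(dist))
    except PackageNotFoundError:
        print(dist, "not found under this distribution name")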