-
Notifications
You must be signed in to change notification settings - Fork 18
/
requirements.txt
70 lines (62 loc) · 2.36 KB
/
requirements.txt
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
accelerate
# auto_gptq and autoawq lack pre-compiled wheels for Python 3.12
auto_gptq; python_version != "3.12"
autoawq; python_version != "3.12"
autoawq_kernels; python_version != "3.12"
bitsandbytes==0.44.1
fastapi
# See: https://github.com/bdashore3/flash-attention/releases for other windows flash_attn releases
# And: https://github.com/Dao-AILab/flash-attention/releases for linux.
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp312-cp312-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.12"
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
https://github.com/Dao-AILab/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4cxx11abiFALSE-cp310-cp310-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.10"
https://github.com/bdashore3/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4.0cxx11abiFALSE-cp312-cp312-win_amd64.whl; platform_system == "Windows" and python_version == "3.12"
https://github.com/bdashore3/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
https://github.com/bdashore3/flash-attention/releases/download/v2.6.3/flash_attn-2.6.3+cu123torch2.4.0cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
flash_attn; python_version != "3.10" and python_version != "3.11" and python_version != "3.12"
hf_transfer
loguru
numpy
openai
peft
protobuf
pydantic
python-datauri
requests
sentencepiece
sse_starlette
torch==2.4.*
uvicorn
wandb
xformers
# moondream
deepspeed
einops
einops-exts
httpx
markdown2[all]
open_clip_torch
shortuuid
timm
tokenizers
torchvision
# qwen
matplotlib
optimum
tiktoken
transformers_stream_generator
qwen-vl-utils
# video
decord
# 360vl
logger
# llava-onevision
git+https://github.com/LLaVA-VL/LLaVA-NeXT.git
# mistral
mistral_inference
mistral_common[opencv]
# got-ocr2
verovio
# Aria: needs to build a lot from source and doesn't work without many extra packages
# BYOB, use it if you need it
#grouped_gemm