From d0027bce32adb9dddca13c5bd9ab33f53f9f078e Mon Sep 17 00:00:00 2001
From: kingbri
Date: Wed, 7 Feb 2024 20:44:23 -0500
Subject: [PATCH] Requirements: Update flash attention 2 for Windows

Version 2.5.2

Signed-off-by: kingbri
---
 requirements.txt | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/requirements.txt b/requirements.txt
index 231254ff..fe11de41 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -24,8 +24,8 @@ colorlog
 # Flash attention v2
 # Windows FA2 from https://github.com/bdashore3/flash-attention/releases
-https://github.com/bdashore3/flash-attention/releases/download/v2.4.2/flash_attn-2.4.2+cu122torch2.2.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
-https://github.com/bdashore3/flash-attention/releases/download/v2.4.2/flash_attn-2.4.2+cu122torch2.2.0cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
+https://github.com/bdashore3/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2.0cxx11abiFALSE-cp311-cp311-win_amd64.whl; platform_system == "Windows" and python_version == "3.11"
+https://github.com/bdashore3/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2.0cxx11abiFALSE-cp310-cp310-win_amd64.whl; platform_system == "Windows" and python_version == "3.10"
 # Linux FA2 from https://github.com/Dao-AILab/flash-attention/releases
 https://github.com/Dao-AILab/flash-attention/releases/download/v2.5.2/flash_attn-2.5.2+cu122torch2.2cxx11abiFALSE-cp311-cp311-linux_x86_64.whl; platform_system == "Linux" and platform_machine == "x86_64" and python_version == "3.11"
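
Note: each wheel URL in this hunk is gated by a PEP 508 environment marker, so pip
only fetches the build matching the current OS and interpreter. A minimal sketch of
how such a marker evaluates, using the packaging library that pip vendors (the
marker string is copied from the cp311 Windows line above; packaging must be
installed separately when checking outside pip):

    # Evaluate a requirements.txt environment marker against this interpreter.
    from packaging.markers import Marker

    marker = Marker('platform_system == "Windows" and python_version == "3.11"')

    # True only on Windows under Python 3.11; anywhere else, pip skips the
    # wheel URL guarded by this marker.
    print(marker.evaluate())

After reinstalling requirements, the bump can be checked from Python, since
flash_attn exposes __version__ in its package init:

    import flash_attn
    print(flash_attn.__version__)  # expect "2.5.2"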