From 39a4e1d4f0e49b2ec4e7ea7abc3652fbdd09f0ba Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 19 Sep 2024 00:38:39 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 transformer_engine/pytorch/attention.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/transformer_engine/pytorch/attention.py b/transformer_engine/pytorch/attention.py
index d02a4f4207..876c549c49 100644
--- a/transformer_engine/pytorch/attention.py
+++ b/transformer_engine/pytorch/attention.py
@@ -120,7 +120,8 @@
     if not fa3_logger.hasHandlers():
         fa3_logger.addHandler(_stream_handler)
     fa3_logger.debug(
-        "To use flash-attn v3, please follow these steps to install the flashattn-hopper package: \n"
+        "To use flash-attn v3, please follow these steps to install the flashattn-hopper"
+        " package: \n"
         """(1) pip install "git+https://github.com/Dao-AILab/flash-attention.git#egg=flashattn-hopper&subdirectory=hopper" \n"""
         """(2) python_path=`python -c "import site; print(site.getsitepackages()[0])"` \n"""
         """(3) mkdir -p $python_path/flashattn_hopper \n"""
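
Note on the change above: the hook only rewraps an over-long line, relying on Python's
implicit concatenation of adjacent string literals, so the logged message is byte-for-byte
unchanged. A minimal sketch of that pattern follows (the logger name "fa3_demo" is
illustrative, not from transformer_engine; only the message text comes from the diff):

    import logging

    # Logger setup mirroring the context lines in the hunk above.
    logger = logging.getLogger("fa3_demo")  # hypothetical name, for demonstration only
    logger.setLevel(logging.DEBUG)
    if not logger.hasHandlers():
        logger.addHandler(logging.StreamHandler())

    # Adjacent string literals are joined at compile time, so these two
    # calls emit exactly the same message; the second form is just the
    # line-length-compliant version the pre-commit hook produced.
    logger.debug(
        "To use flash-attn v3, please follow these steps to install the flashattn-hopper package: \n"
    )
    logger.debug(
        "To use flash-attn v3, please follow these steps to install the flashattn-hopper"
        " package: \n"
    )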