From 6a8666054d293e980fecc6a14faad09964981e51 Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Tue, 17 Sep 2024 20:52:22 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 transformer_engine/pytorch/attention.py | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/transformer_engine/pytorch/attention.py b/transformer_engine/pytorch/attention.py
index e138f946e8..296b68af8a 100644
--- a/transformer_engine/pytorch/attention.py
+++ b/transformer_engine/pytorch/attention.py
@@ -5110,10 +5110,10 @@ def convert_to_torch_float8(tensor, dtype):
     if fp8 and fp8_meta["recipe"].fp8_mha:
         output = Float8Tensor.make_like(
             output,
-            data=output._data.reshape(
-                batch_size, max_seqlen_q // cp_size, -1
-            ).transpose(0,1).contiguous()
-            )
+            data=output._data.reshape(batch_size, max_seqlen_q // cp_size, -1)
+            .transpose(0, 1)
+            .contiguous(),
+        )
     else:
         output = (
             output.view(batch_size, max_seqlen_q // cp_size, -1)