From dd6ee3cd2944f00a7eb1c92ee3fbe2b8ac4dc48f Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 27 Jun 2024 01:14:16 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 .../transformers/kv_cache_compression/models/modeling_llama.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/intel_extension_for_transformers/transformers/kv_cache_compression/models/modeling_llama.py b/intel_extension_for_transformers/transformers/kv_cache_compression/models/modeling_llama.py
index 0a2ee026c9c..1c8928ce4d1 100644
--- a/intel_extension_for_transformers/transformers/kv_cache_compression/models/modeling_llama.py
+++ b/intel_extension_for_transformers/transformers/kv_cache_compression/models/modeling_llama.py
@@ -924,7 +924,7 @@ def _update_causal_mask(
             inputs_embeds=input_tensor,
             past_key_values_length=past_seen_tokens,
             is_training=self.training,
-        ): 
+        ):
             return None
 
         dtype, device = input_tensor.dtype, input_tensor.device