diff --git a/keras_nlp/src/models/causal_lm.py b/keras_nlp/src/models/causal_lm.py
index 3493532e2c..c24150041e 100644
--- a/keras_nlp/src/models/causal_lm.py
+++ b/keras_nlp/src/models/causal_lm.py
@@ -352,9 +352,9 @@ def generate(
             raise ValueError(
                 'A `preprocessor` must be attached to the model if `stop_token_ids="auto"`. '
                 "Currently `preprocessor=None`. To call `generate()` with preprocessing "
-                "detached, either pass `stop_tokens_ids=None` to always generate until "
+                "detached, either pass `stop_token_ids=None` to always generate until "
                 "`max_length` or pass a tuple of token ids that should terminate generation "
-                "as `stop_tokens_ids`."
+                "as `stop_token_ids`."
             )
         elif stop_token_ids == "auto":
             stop_token_ids = [self.preprocessor.tokenizer.end_token_id]
diff --git a/keras_nlp/src/models/phi3/phi3_presets.py b/keras_nlp/src/models/phi3/phi3_presets.py
index 48ea0c1994..0f935d3371 100644
--- a/keras_nlp/src/models/phi3/phi3_presets.py
+++ b/keras_nlp/src/models/phi3/phi3_presets.py
@@ -22,7 +22,7 @@
                 "model. The model was trained using the Phi-3 datasets. This "
                 "dataset includes both synthetic data and filtered publicly "
                 "available website data, with an emphasis on high-quality and "
-                "reasoning-dense properties.",
+                "reasoning-dense properties."
             ),
             "params": 3821079552,
             "official_name": "Phi-3",
diff --git a/tools/checkpoint_conversion/convert_llama_checkpoints.py b/tools/checkpoint_conversion/convert_llama_checkpoints.py
index 27be78901b..b91dd51d1b 100644
--- a/tools/checkpoint_conversion/convert_llama_checkpoints.py
+++ b/tools/checkpoint_conversion/convert_llama_checkpoints.py
@@ -257,7 +257,7 @@ def main(_):
     test_model(keras_nlp_model, keras_nlp_tokenizer, hf_model, hf_tokenizer)
     print("\n-> Tests passed!")
 
-    keras_nlp_model.save_to_preset(keras_nlp_model, preset)
+    keras_nlp_model.save_to_preset(preset)
     print("\n-> Saved the model preset in float16")
 
     # === Save the tokenizer ===