diff --git a/backends/exllamav2/model.py b/backends/exllamav2/model.py
index 6f17570..c7d2069 100644
--- a/backends/exllamav2/model.py
+++ b/backends/exllamav2/model.py
@@ -389,6 +389,10 @@ async def find_prompt_template(self, prompt_template_name, model_directory):
         logger.info("Attempting to load a prompt template if present.")
 
         find_template_functions = [
+            lambda: PromptTemplate.from_model_json(
+                pathlib.Path(self.config.model_dir) / "chat_template.json",
+                key="chat_template",
+            ),
             lambda: PromptTemplate.from_model_json(
                 pathlib.Path(self.config.model_dir) / "tokenizer_config.json",
                 key="chat_template",
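
The added lambda makes `chat_template.json` the first template source checked, ahead of `tokenizer_config.json`. Below is a minimal sketch of how such an ordered lookup could be consumed, assuming the surrounding `find_prompt_template` body simply tries each callable and keeps the first template that loads; the helper name `_first_loaded_template`, the exception types, and the return handling are illustrative assumptions, not code from the diff:

    def _first_loaded_template(find_template_functions):
        """Hypothetical helper: return the first template that loads, else None."""
        for template_func in find_template_functions:
            try:
                # Each entry is expected to return a PromptTemplate or raise
                # if its source file or "chat_template" key is absent.
                prompt_template = template_func()
                if prompt_template:
                    return prompt_template
            except (FileNotFoundError, KeyError):
                # Missing file or missing key: fall through to the next source.
                continue
        return None

Because the list is ordered, a model that ships a standalone `chat_template.json` takes priority, while models without one fall back to the template embedded in `tokenizer_config.json` exactly as before.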