From 74f1d5ab5794ff5a8ce9de77c0b6ffb986fd8029 Mon Sep 17 00:00:00 2001 From: Thomas-AH-Heller <94134707+Thomas-AH-Heller@users.noreply.github.com> Date: Tue, 27 Feb 2024 19:21:06 +0100 Subject: [PATCH] Add optional llm parameter (default None) to ContextChatEngine.from_defaults classmethod --- llama-index-core/llama_index/core/chat_engine/context.py | 1 + 1 file changed, 1 insertion(+) diff --git a/llama-index-core/llama_index/core/chat_engine/context.py b/llama-index-core/llama_index/core/chat_engine/context.py index 0c7a24e41f17b..091e045ee5f7c 100644 --- a/llama-index-core/llama_index/core/chat_engine/context.py +++ b/llama-index-core/llama_index/core/chat_engine/context.py @@ -69,6 +69,7 @@ def from_defaults( prefix_messages: Optional[List[ChatMessage]] = None, node_postprocessors: Optional[List[BaseNodePostprocessor]] = None, context_template: Optional[str] = None, + llm: Optional[LLM] = None, **kwargs: Any, ) -> "ContextChatEngine": """Initialize a ContextChatEngine from default parameters."""