From 8c0d6a00a4b824815a40ca02163878da62c7bdc7 Mon Sep 17 00:00:00 2001
From: Thomas-AH-Heller <94134707+Thomas-AH-Heller@users.noreply.github.com>
Date: Tue, 27 Feb 2024 20:03:31 +0100
Subject: [PATCH] Refactor llm initialization

---
 llama-index-core/llama_index/core/chat_engine/context.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/llama-index-core/llama_index/core/chat_engine/context.py b/llama-index-core/llama_index/core/chat_engine/context.py
index 091e045ee5f7c..a9ba5ba560d8d 100644
--- a/llama-index-core/llama_index/core/chat_engine/context.py
+++ b/llama-index-core/llama_index/core/chat_engine/context.py
@@ -73,7 +73,7 @@ def from_defaults(
         **kwargs: Any,
     ) -> "ContextChatEngine":
         """Initialize a ContextChatEngine from default parameters."""
-        llm = llm_from_settings_or_context(Settings, service_context)
+        llm = llm or llm_from_settings_or_context(Settings, service_context)
         chat_history = chat_history or []
         memory = memory or ChatMemoryBuffer.from_defaults(