diff --git a/llama-index-core/llama_index/core/llms/utils.py b/llama-index-core/llama_index/core/llms/utils.py
index ecb388cd186af..1c87dd38ca3ea 100644
--- a/llama-index-core/llama_index/core/llms/utils.py
+++ b/llama-index-core/llama_index/core/llms/utils.py
@@ -64,7 +64,7 @@ def resolve_llm(
                 "llm must start with str 'local' or of type LLM or BaseLanguageModel"
             )
         try:
-            from llama_index.llms.llama.utils import (
+            from llama_index.llms.llama_cpp.llama_utils import (
                 completion_to_prompt,
                 messages_to_prompt,
             )  # pants: no-infer-dep
diff --git a/llama-index-core/llama_index/core/program/utils.py b/llama-index-core/llama_index/core/program/utils.py
index fa60e5734b197..a96eeb878b270 100644
--- a/llama-index-core/llama_index/core/program/utils.py
+++ b/llama-index-core/llama_index/core/program/utils.py
@@ -90,9 +90,15 @@ def get_program_for_llm(
             **kwargs,
         )
     elif pydantic_program_mode == PydanticProgramMode.LM_FORMAT_ENFORCER:
-        from llama_index.core.program.lmformatenforcer_program import (
-            LMFormatEnforcerPydanticProgram,
-        )
+        try:
+            from llama_index.program.lmformatenforcer import (
+                LMFormatEnforcerPydanticProgram,
+            )  # pants: no-infer-dep
+        except ImportError:
+            raise ImportError(
+                "This mode requires the `llama-index-program-lmformatenforcer` package. Please"
+                " install it by running `pip install llama-index-program-lmformatenforcer`."
+            )
         return LMFormatEnforcerPydanticProgram.from_defaults(
             output_cls=output_cls,