diff --git a/llama-index-core/llama_index/core/prompts/base.py b/llama-index-core/llama_index/core/prompts/base.py
index c11c839b579a86eb45259b1439ffd198eae75b03..326d48dc691036eda99ef58d0373eb41e2456313 100644
--- a/llama-index-core/llama_index/core/prompts/base.py
+++ b/llama-index-core/llama_index/core/prompts/base.py
@@ -451,7 +451,7 @@ class LangchainPromptTemplate(BasePromptTemplate):

     def format(self, llm: Optional[BaseLLM] = None, **kwargs: Any) -> str:
         """Format the prompt into a string."""
-        from llama_index.core.llms.langchain import LangChainLLM
+        from llama_index.llms.langchain import LangChainLLM

         if llm is not None:
             # if llamaindex LLM is provided, and we require a langchain LLM,
@@ -474,8 +474,8 @@ class LangchainPromptTemplate(BasePromptTemplate):
         self, llm: Optional[BaseLLM] = None, **kwargs: Any
     ) -> List[ChatMessage]:
         """Format the prompt into a list of chat messages."""
-        from llama_index.core.llms.langchain import LangChainLLM
-        from llama_index.core.llms.langchain_utils import from_lc_messages
+        from llama_index.llms.langchain import LangChainLLM
+        from llama_index.llms.langchain.utils import from_lc_messages

         if llm is not None:
             # if llamaindex LLM is provided, and we require a langchain LLM,
@@ -497,7 +497,7 @@ class LangchainPromptTemplate(BasePromptTemplate):
         return from_lc_messages(lc_messages)

     def get_template(self, llm: Optional[BaseLLM] = None) -> str:
-        from llama_index.core.llms.langchain import LangChainLLM
+        from llama_index.llms.langchain import LangChainLLM

         if llm is not None:
             # if llamaindex LLM is provided, and we require a langchain LLM,
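
Not part of the patch, but for context: a minimal sketch of the code path these lazy imports serve, assuming the `llama-index-llms-langchain` integration package (which provides `llama_index.llms.langchain`) is installed. `FakeListLLM` is just a stand-in LangChain model so the snippet runs offline, and the template text is illustrative only:

```python
from langchain_community.llms.fake import FakeListLLM
from langchain_core.prompts import PromptTemplate as LCPromptTemplate

from llama_index.core.prompts import LangchainPromptTemplate
from llama_index.llms.langchain import LangChainLLM

# Wrap a LangChain prompt so it can be used wherever LlamaIndex
# expects a BasePromptTemplate.
lc_prompt = LCPromptTemplate.from_template("Summarize the following text: {text}")
prompt = LangchainPromptTemplate(template=lc_prompt)

# Passing a LlamaIndex LLM exercises the lazy import fixed above:
# format() unwraps a LangChainLLM back to its underlying LangChain
# model before selecting and filling the prompt.
llm = LangChainLLM(llm=FakeListLLM(responses=["ok"]))
print(prompt.format(llm=llm, text="LlamaIndex can reuse LangChain prompts."))
# -> "Summarize the following text: LlamaIndex can reuse LangChain prompts."
```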