Unverified commit b6e33d45 authored by Anoop Sharma, committed by GitHub

corrected import (#10704)

parent a86f77c4
@@ -451,7 +451,7 @@ class LangchainPromptTemplate(BasePromptTemplate):
     def format(self, llm: Optional[BaseLLM] = None, **kwargs: Any) -> str:
         """Format the prompt into a string."""
-        from llama_index.core.llms.langchain import LangChainLLM
+        from llama_index.llms.langchain import LangChainLLM
 
         if llm is not None:
             # if llamaindex LLM is provided, and we require a langchain LLM,
@@ -474,8 +474,8 @@ class LangchainPromptTemplate(BasePromptTemplate):
         self, llm: Optional[BaseLLM] = None, **kwargs: Any
     ) -> List[ChatMessage]:
         """Format the prompt into a list of chat messages."""
-        from llama_index.core.llms.langchain import LangChainLLM
-        from llama_index.core.llms.langchain_utils import from_lc_messages
+        from llama_index.llms.langchain import LangChainLLM
+        from llama_index.llms.langchain.utils import from_lc_messages
 
         if llm is not None:
             # if llamaindex LLM is provided, and we require a langchain LLM,
@@ -497,7 +497,7 @@ class LangchainPromptTemplate(BasePromptTemplate):
         return from_lc_messages(lc_messages)
 
     def get_template(self, llm: Optional[BaseLLM] = None) -> str:
-        from llama_index.core.llms.langchain import LangChainLLM
+        from llama_index.llms.langchain import LangChainLLM
 
         if llm is not None:
             # if llamaindex LLM is provided, and we require a langchain LLM,
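For reference, a minimal sketch of how the corrected import paths are exercised (assuming llama-index >= 0.10 with the llama-index-llms-langchain and langchain packages installed; the prompt text below is illustrative, not from this commit):

    from langchain_core.prompts import PromptTemplate as LangchainTemplate
    from llama_index.core.prompts import LangchainPromptTemplate

    # Wrap a LangChain prompt so it can be used wherever LlamaIndex
    # expects a BasePromptTemplate.
    lc_template = LangchainTemplate.from_template("Summarize: {text}")
    li_template = LangchainPromptTemplate(template=lc_template)

    # format() performs the deferred import fixed by this commit
    # (`from llama_index.llms.langchain import LangChainLLM`), so the
    # integration package must be installed even when no LLM is passed.
    print(li_template.format(text="LlamaIndex wraps LangChain prompts."))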