diff --git a/benchmarks/struct_indices/spider/evaluate.py b/benchmarks/struct_indices/spider/evaluate.py
index 3a6630a66229d8c9f6bbbf503ead236a46cfc695..ebed59cb178934d9fcb2ee6202e4f96c178b8146 100644
--- a/benchmarks/struct_indices/spider/evaluate.py
+++ b/benchmarks/struct_indices/spider/evaluate.py
@@ -6,8 +6,8 @@ import logging
 import os
 from typing import Dict, List, Optional
 
-from langchain.chat_models import ChatOpenAI
-from langchain.schema import HumanMessage
+from llama_index.bridge.langchain import ChatOpenAI
+from llama_index.bridge.langchain import HumanMessage
 from llama_index.response.schema import Response
 from spider_utils import create_indexes, load_examples
 from tqdm import tqdm
diff --git a/benchmarks/struct_indices/spider/generate_sql.py b/benchmarks/struct_indices/spider/generate_sql.py
index 29a68af3669f89ca427faaefdad8b6be87f5dd87..cf42f2054cea464e8d909d65d1f7272a239dc1a9 100644
--- a/benchmarks/struct_indices/spider/generate_sql.py
+++ b/benchmarks/struct_indices/spider/generate_sql.py
@@ -5,9 +5,8 @@ import logging
 import os
 import re
 
-from langchain.chat_models import ChatOpenAI
-from langchain.llms import OpenAI
-from langchain.base_language import BaseLanguageModel
+from llama_index.bridge.langchain import ChatOpenAI, OpenAI
+from llama_index.bridge.langchain import BaseLanguageModel
 from sqlalchemy import create_engine, text
 from tqdm import tqdm
 
diff --git a/benchmarks/struct_indices/spider/spider_utils.py b/benchmarks/struct_indices/spider/spider_utils.py
index aa37e10e35a35a29c93c4d2b295bdda10664c24d..e29fc02e6862446bf84c907a6ea53d9796c57684 100644
--- a/benchmarks/struct_indices/spider/spider_utils.py
+++ b/benchmarks/struct_indices/spider/spider_utils.py
@@ -4,8 +4,8 @@ import json
 import os
 from typing import Dict, Tuple, Union
 
-from langchain import OpenAI
-from langchain.chat_models import ChatOpenAI
+from llama_index.bridge.langchain import OpenAI
+from llama_index.bridge.langchain import ChatOpenAI
 from sqlalchemy import create_engine, text
 
 from llama_index import SQLStructStoreIndex, LLMPredictor, SQLDatabase
diff --git a/experimental/cli/configuration.py b/experimental/cli/configuration.py
index b9850d5931723bcdb6f2d93c37c6a950386df6d8..a13c98e6cd67cbf06c30dda76f9361f6abeac15b 100644
--- a/experimental/cli/configuration.py
+++ b/experimental/cli/configuration.py
@@ -2,8 +2,8 @@ import os
 from configparser import ConfigParser, SectionProxy
 from typing import Any, Type
 from llama_index.embeddings.openai import OpenAIEmbedding
-from langchain import OpenAI
-from langchain.base_language import BaseLanguageModel
+from llama_index.bridge.langchain import OpenAI
+from llama_index.bridge.langchain import BaseLanguageModel
 from llama_index.indices.base import BaseIndex
 from llama_index.embeddings.base import BaseEmbedding
 from llama_index import (
diff --git a/llama_index/agent/openai_agent.py b/llama_index/agent/openai_agent.py
index 9d847a123ac39f7cb85e2d8fbfc03aadb8e1d820..555354da3637fdc0362e8fe1c269acc52586acef 100644
--- a/llama_index/agent/openai_agent.py
+++ b/llama_index/agent/openai_agent.py
@@ -2,9 +2,7 @@ import json
 from abc import abstractmethod
 from typing import Callable, List, Optional
 
-from langchain.chat_models import ChatOpenAI
-from langchain.memory import ChatMessageHistory
-from langchain.schema import FunctionMessage
+from llama_index.bridge.langchain import FunctionMessage, ChatMessageHistory, ChatOpenAI
 
 from llama_index.callbacks.base import CallbackManager
 from llama_index.chat_engine.types import BaseChatEngine
diff --git a/llama_index/agent/retriever_openai_agent.py b/llama_index/agent/retriever_openai_agent.py
index 8cd27f6efe877a53502efcd859ca67a4b9f9219a..e03f0e6b8035419cb1fcd1ba5370b20c17929f8c 100644
--- a/llama_index/agent/retriever_openai_agent.py
+++ b/llama_index/agent/retriever_openai_agent.py
@@ -4,9 +4,8 @@ from llama_index.agent.openai_agent import BaseOpenAIAgent
 from llama_index.objects.base import ObjectRetriever
 from llama_index.tools.types import BaseTool
 from typing import Optional, List
-from langchain.chat_models import ChatOpenAI
+from llama_index.bridge.langchain import ChatOpenAI, ChatMessageHistory
 from llama_index.callbacks.base import CallbackManager
-from langchain.memory import ChatMessageHistory
 from llama_index.agent.openai_agent import (
     SUPPORTED_MODEL_NAMES,
     DEFAULT_MAX_FUNCTION_CALLS,
diff --git a/llama_index/bridge/__init__.py b/llama_index/bridge/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/llama_index/bridge/langchain.py b/llama_index/bridge/langchain.py
new file mode 100644
index 0000000000000000000000000000000000000000..dd1ceed09e9619ffd33601cba0e1dcb0c18d9fa5
--- /dev/null
+++ b/llama_index/bridge/langchain.py
@@ -0,0 +1,106 @@
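+"""Bridge module that re-exports the langchain symbols used throughout llama_index."""
+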
+import langchain
+
+# LLMs
+from langchain.llms import BaseLLM, FakeListLLM, OpenAI, AI21, Cohere
+from langchain.chat_models.base import BaseChatModel
+from langchain.chat_models import ChatOpenAI
+from langchain.base_language import BaseLanguageModel
+
+# embeddings
+from langchain.embeddings.base import Embeddings
+
+# prompts
+from langchain import PromptTemplate, BasePromptTemplate
+from langchain.chains.prompt_selector import ConditionalPromptSelector, is_chat_model
+from langchain.prompts.chat import (
+    AIMessagePromptTemplate,
+    ChatPromptTemplate,
+    HumanMessagePromptTemplate,
+    BaseMessagePromptTemplate,
+)
+
+# chain
+from langchain import LLMChain
+
+# chat and memory
+from langchain.memory.chat_memory import BaseChatMemory
+from langchain.memory import ConversationBufferMemory, ChatMessageHistory
+
+# agents and tools
+from langchain.agents.agent_toolkits.base import BaseToolkit
+from langchain.agents import AgentType
+from langchain.agents import AgentExecutor, initialize_agent
+from langchain.tools import StructuredTool, Tool, BaseTool
+
+# input & output
+from langchain.text_splitter import TextSplitter
+from langchain.output_parsers import ResponseSchema
+from langchain.output_parsers import PydanticOutputParser
+from langchain.input import print_text, get_color_mapping
+
+# callback
+from langchain.callbacks.base import BaseCallbackHandler, BaseCallbackManager
+
+# schema
+from langchain.schema import AIMessage, FunctionMessage, BaseMessage, HumanMessage
+from langchain.schema import BaseMemory
+from langchain.schema import BaseOutputParser, LLMResult
+from langchain.schema import ChatGeneration
+
+# misc
+from langchain.sql_database import SQLDatabase
+from langchain.cache import GPTCache, BaseCache
+from langchain.docstore.document import Document
+
+__all__ = [
+    "langchain",
+    "BaseLLM",
+    "FakeListLLM",
+    "OpenAI",
+    "AI21",
+    "Cohere",
+    "BaseChatModel",
+    "ChatOpenAI",
+    "BaseLanguageModel",
+    "Embeddings",
+    "PromptTemplate",
+    "BasePromptTemplate",
+    "ConditionalPromptSelector",
+    "is_chat_model",
+    "AIMessagePromptTemplate",
+    "ChatPromptTemplate",
+    "HumanMessagePromptTemplate",
+    "BaseMessagePromptTemplate",
+    "LLMChain",
+    "BaseChatMemory",
+    "ConversationBufferMemory",
+    "ChatMessageHistory",
+    "BaseToolkit",
+    "AgentType",
+    "AgentExecutor",
+    "initialize_agent",
+    "StructuredTool",
+    "Tool",
+    "BaseTool",
+    "TextSplitter",
+    "ResponseSchema",
+    "PydanticOutputParser",
+    "print_text",
+    "get_color_mapping",
+    "BaseCallbackHandler",
+    "BaseCallbackManager",
+    "AIMessage",
+    "FunctionMessage",
+    "BaseMessage",
+    "HumanMessage",
+    "BaseMemory",
+    "BaseOutputParser",
+    "HumanMessage",
+    "BaseMessage",
+    "LLMResult",
+    "ChatGeneration",
+    "SQLDatabase",
+    "GPTCache",
+    "BaseCache",
+    "Document",
+]
diff --git a/llama_index/chat_engine/react.py b/llama_index/chat_engine/react.py
index e69965a8a147585a2e71b49467cb984c9abee789..2f3b839acdeb805040e9353418a2159b8d437f2b 100644
--- a/llama_index/chat_engine/react.py
+++ b/llama_index/chat_engine/react.py
@@ -1,7 +1,6 @@
 from typing import Any, Optional, Sequence
 
-from langchain.memory import ConversationBufferMemory
-from langchain.memory.chat_memory import BaseChatMemory
+from llama_index.bridge.langchain import ConversationBufferMemory, BaseChatMemory
 
 from llama_index.chat_engine.types import BaseChatEngine, ChatHistoryType
 from llama_index.chat_engine.utils import is_chat_model, to_langchain_chat_history
diff --git a/llama_index/chat_engine/simple.py b/llama_index/chat_engine/simple.py
index 531bac8c2933263fe75db40f2e3f6b5e6f69537b..983bc4b3d3819796ed22e454b1a8553f304b5a6b 100644
--- a/llama_index/chat_engine/simple.py
+++ b/llama_index/chat_engine/simple.py
@@ -1,7 +1,6 @@
 from typing import Any, Optional
 
-from langchain.chat_models.base import BaseChatModel
-from langchain.schema import ChatGeneration
+from llama_index.bridge.langchain import BaseChatModel, ChatGeneration
 
 from llama_index.chat_engine.types import BaseChatEngine, ChatHistoryType
 from llama_index.chat_engine.utils import (
diff --git a/llama_index/chat_engine/utils.py b/llama_index/chat_engine/utils.py
index 25bb2899d807af9be852e6c6f6798d287a88a4e8..ba137abdfc46ca05cbf9da9d1448ea937861145b 100644
--- a/llama_index/chat_engine/utils.py
+++ b/llama_index/chat_engine/utils.py
@@ -1,7 +1,6 @@
 from typing import Optional
 
-from langchain.chat_models.base import BaseChatModel
-from langchain.memory import ChatMessageHistory
+from llama_index.bridge.langchain import BaseChatModel, ChatMessageHistory
 
 from llama_index.chat_engine.types import ChatHistoryType
 from llama_index.indices.service_context import ServiceContext
diff --git a/llama_index/embeddings/langchain.py b/llama_index/embeddings/langchain.py
index 0b89aa7dbc9d19bf31b6ea3db7f73e75c51a7f5a..e5b743fc51fae7381d007c8187a807b51f1acff8 100644
--- a/llama_index/embeddings/langchain.py
+++ b/llama_index/embeddings/langchain.py
@@ -3,7 +3,7 @@
 
 from typing import Any, List
 
-from langchain.embeddings.base import Embeddings as LCEmbeddings
+from llama_index.bridge.langchain import Embeddings as LCEmbeddings
 
 from llama_index.embeddings.base import BaseEmbedding
 
diff --git a/llama_index/evaluation/dataset_generation.py b/llama_index/evaluation/dataset_generation.py
index a837cc7a8126eb64840c47a03e208c3ce8c0f736..aa71d2f31692e1a3060a81c661eaeef27ebd86de 100644
--- a/llama_index/evaluation/dataset_generation.py
+++ b/llama_index/evaluation/dataset_generation.py
@@ -4,7 +4,7 @@ from __future__ import annotations
 import re
 from typing import List, Optional
 
-from langchain.chat_models import ChatOpenAI
+from llama_index.bridge.langchain import ChatOpenAI
 
 from llama_index import (
     Document,
diff --git a/llama_index/evaluation/guideline_eval.py b/llama_index/evaluation/guideline_eval.py
index 26a6de704f015e4c9ced82e31256cad3bc590c5f..5eab879df33c9c12e823b57906b67912f88ff5c6 100644
--- a/llama_index/evaluation/guideline_eval.py
+++ b/llama_index/evaluation/guideline_eval.py
@@ -1,7 +1,7 @@
 import logging
 from typing import Optional
 
-from langchain.output_parsers import PydanticOutputParser
+from llama_index.bridge.langchain import PydanticOutputParser
 from pydantic import BaseModel, Field
 
 from llama_index.evaluation.base import BaseEvaluator, Evaluation
diff --git a/llama_index/indices/query/query_transform/base.py b/llama_index/indices/query/query_transform/base.py
index 8551c9b57aa8b2d53b9185780c9ca58f4c446903..9fb4a688369425327e34111ee82cd86dd7682f09 100644
--- a/llama_index/indices/query/query_transform/base.py
+++ b/llama_index/indices/query/query_transform/base.py
@@ -4,7 +4,7 @@ import dataclasses
 from abc import abstractmethod
 from typing import Dict, Optional, cast
 
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 
 from llama_index.indices.query.query_transform.prompts import (
     DEFAULT_DECOMPOSE_QUERY_TRANSFORM_PROMPT,
diff --git a/llama_index/indices/service_context.py b/llama_index/indices/service_context.py
index 6a591e725c1b07f109f32d9b8fdc3a34f8d5689b..ddbb282903e6df1e713cb7739b1bc8f3c92cc6bd 100644
--- a/llama_index/indices/service_context.py
+++ b/llama_index/indices/service_context.py
@@ -3,7 +3,7 @@ import logging
 from dataclasses import dataclass
 from typing import Optional
 
-from langchain.base_language import BaseLanguageModel
+from llama_index.bridge.langchain import BaseLanguageModel
 
 import llama_index
 from llama_index.callbacks.base import CallbackManager
diff --git a/llama_index/indices/struct_store/json_query.py b/llama_index/indices/struct_store/json_query.py
index cf2ebc533eb47704aad7f58d9995463f328ba7bd..af9d588264a21a80dc367481c4689c9b79ddde64 100644
--- a/llama_index/indices/struct_store/json_query.py
+++ b/llama_index/indices/struct_store/json_query.py
@@ -2,7 +2,7 @@ import json
 import logging
 from typing import Any, Callable, Dict, List, Optional, Union
 
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 
 from llama_index.indices.query.base import BaseQueryEngine
 from llama_index.indices.query.schema import QueryBundle
diff --git a/llama_index/indices/tree/select_leaf_retriever.py b/llama_index/indices/tree/select_leaf_retriever.py
index 2230e9e13412bc8addb4963d9f27058cec7ad417..84af7e548ff8f58d65b279aaabd73ce1ae1f0d34 100644
--- a/llama_index/indices/tree/select_leaf_retriever.py
+++ b/llama_index/indices/tree/select_leaf_retriever.py
@@ -3,7 +3,7 @@
 import logging
 from typing import Any, Dict, List, Optional, cast
 
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 
 from llama_index.data_structs.node import Node, NodeWithScore
 from llama_index.indices.base_retriever import BaseRetriever
diff --git a/llama_index/langchain_helpers/agents/agents.py b/llama_index/langchain_helpers/agents/agents.py
index 2bc4b3b5c522eb934dd9618a6fd7143f6ac44e0f..dc077c46b0806dfe2fa89eb0ea38765570c158ee 100644
--- a/llama_index/langchain_helpers/agents/agents.py
+++ b/llama_index/langchain_helpers/agents/agents.py
@@ -2,10 +2,13 @@
 
 from typing import Any, Optional
 
-from langchain.agents import AgentExecutor, initialize_agent
-from langchain.callbacks.base import BaseCallbackManager
-from langchain.llms.base import BaseLLM
-from langchain.agents.agent_types import AgentType
+from llama_index.bridge.langchain import (
+    BaseLLM,
+    AgentType,
+    AgentExecutor,
+    initialize_agent,
+    BaseCallbackManager,
+)
 
 from llama_index.langchain_helpers.agents.toolkits import LlamaToolkit
 
diff --git a/llama_index/langchain_helpers/agents/toolkits.py b/llama_index/langchain_helpers/agents/toolkits.py
index d821f9f53a0e979cdeab99def48b2848c5cc673a..968556b18ffcd721d0b0eb33b4ceb68c85a4e68a 100644
--- a/llama_index/langchain_helpers/agents/toolkits.py
+++ b/llama_index/langchain_helpers/agents/toolkits.py
@@ -2,8 +2,7 @@
 
 from typing import List
 
-from langchain.agents.agent_toolkits.base import BaseToolkit
-from langchain.tools import BaseTool
+from llama_index.bridge.langchain import BaseTool, BaseToolkit
 from pydantic import Field
 
 from llama_index.langchain_helpers.agents.tools import (
diff --git a/llama_index/langchain_helpers/agents/tools.py b/llama_index/langchain_helpers/agents/tools.py
index c4c4f7f57ab6abcce34fe98bfbdc277bee55d6cd..4e4940786416afa682a8851943bfa6b0855d2673 100644
--- a/llama_index/langchain_helpers/agents/tools.py
+++ b/llama_index/langchain_helpers/agents/tools.py
@@ -2,7 +2,7 @@
 
 from typing import Dict
 
-from langchain.tools import BaseTool
+from llama_index.bridge.langchain import BaseTool
 from pydantic import BaseModel, Field
 
 from llama_index.indices.query.base import BaseQueryEngine
diff --git a/llama_index/langchain_helpers/memory_wrapper.py b/llama_index/langchain_helpers/memory_wrapper.py
index 9fcfde0209c056f376a6f581aabaf3fa08d5b88d..04a401caad0eeaca3a409c0b69c0e582c525de38 100644
--- a/llama_index/langchain_helpers/memory_wrapper.py
+++ b/llama_index/langchain_helpers/memory_wrapper.py
@@ -2,10 +2,13 @@
 
 from typing import Any, Dict, List, Optional
 
-from langchain.memory.chat_memory import BaseChatMemory
-from langchain.schema import AIMessage
-from langchain.schema import BaseMemory as Memory
-from langchain.schema import BaseMessage, HumanMessage
+from llama_index.bridge.langchain import (
+    BaseChatMemory,
+    AIMessage,
+    BaseMemory as Memory,
+    BaseMessage,
+    HumanMessage,
+)
 from pydantic import Field
 
 from llama_index.indices.base import BaseIndex
diff --git a/llama_index/langchain_helpers/sql_wrapper.py b/llama_index/langchain_helpers/sql_wrapper.py
index 179753aceba414230ec6cae00edc51b64552dfdc..d9d5d1e6a90965c2d7c4076799c80bdfc085f0c7 100644
--- a/llama_index/langchain_helpers/sql_wrapper.py
+++ b/llama_index/langchain_helpers/sql_wrapper.py
@@ -1,7 +1,7 @@
 """SQL wrapper around SQLDatabase in langchain."""
 from typing import Any, Dict, List, Tuple, Optional
 
-from langchain.sql_database import SQLDatabase as LangchainSQLDatabase
+from llama_index.bridge.langchain import SQLDatabase as LangchainSQLDatabase
 from sqlalchemy import MetaData, create_engine, insert, text
 from sqlalchemy.engine import Engine
 
diff --git a/llama_index/langchain_helpers/streaming.py b/llama_index/langchain_helpers/streaming.py
index 3e59a2fbd8b015e1500d79fd6f7b2c91e93b926f..ba5dda56cd7ee37d37b6bb8951abae399e08c210 100644
--- a/llama_index/langchain_helpers/streaming.py
+++ b/llama_index/langchain_helpers/streaming.py
@@ -2,8 +2,7 @@ from queue import Queue
 from threading import Event
 from typing import Any, Generator, Union
 
-from langchain.callbacks.base import BaseCallbackHandler
-from langchain.schema import LLMResult
+from llama_index.bridge.langchain import BaseCallbackHandler, LLMResult
 
 
 class StreamingGeneratorCallbackHandler(BaseCallbackHandler):
diff --git a/llama_index/langchain_helpers/text_splitter.py b/llama_index/langchain_helpers/text_splitter.py
index aa4131ab4d0d5815ca280b30af28838e76fc1944..f524de4dfcbd5818c8e3daf30dd4d62776bad8ca 100644
--- a/llama_index/langchain_helpers/text_splitter.py
+++ b/llama_index/langchain_helpers/text_splitter.py
@@ -2,8 +2,8 @@
 from dataclasses import dataclass
 from typing import Callable, List, Optional
 
-from langchain.text_splitter import TextSplitter
 from llama_index.constants import DEFAULT_CHUNK_OVERLAP, DEFAULT_CHUNK_SIZE
+from llama_index.bridge.langchain import TextSplitter
 
 from llama_index.callbacks.base import CallbackManager
 from llama_index.callbacks.schema import CBEventType, EventPayload
diff --git a/llama_index/llm_predictor/base.py b/llama_index/llm_predictor/base.py
index c6b0107ab952d622792ae3843afdd244b9fc4939..0a87e6d097d2ad999f2bcb45cd84c96658fbeafa 100644
--- a/llama_index/llm_predictor/base.py
+++ b/llama_index/llm_predictor/base.py
@@ -6,12 +6,10 @@ from dataclasses import dataclass
 from threading import Thread
 from typing import Any, Generator, Optional, Protocol, Tuple, runtime_checkable
 
-import langchain
 import openai
-from langchain import BaseCache, Cohere, LLMChain, OpenAI
-from langchain.base_language import BaseLanguageModel
-from langchain.chat_models import ChatOpenAI
-from langchain.llms import AI21
+from llama_index.bridge.langchain import langchain
+from llama_index.bridge.langchain import BaseCache, Cohere, LLMChain, OpenAI
+from llama_index.bridge.langchain import ChatOpenAI, AI21, BaseLanguageModel
 
 from llama_index.callbacks.base import CallbackManager
 from llama_index.callbacks.schema import CBEventType, EventPayload
diff --git a/llama_index/llm_predictor/chatgpt.py b/llama_index/llm_predictor/chatgpt.py
index 8d91f6d74f1b688410d817a0d12f71aa54df2931..2d3f934798e3f30f6d4993bf6cc1fbb8135db9d7 100644
--- a/llama_index/llm_predictor/chatgpt.py
+++ b/llama_index/llm_predictor/chatgpt.py
@@ -4,16 +4,17 @@ import logging
 from typing import Any, List, Optional, Union
 
 import openai
-from langchain import LLMChain
-from langchain.chat_models import ChatOpenAI
-from langchain.prompts.base import BasePromptTemplate
-from langchain.prompts.chat import (
+from llama_index.bridge.langchain import (
+    LLMChain,
+    ChatOpenAI,
     BaseMessagePromptTemplate,
     ChatPromptTemplate,
     HumanMessagePromptTemplate,
+    BaseLanguageModel,
+    BaseMessage,
+    PromptTemplate,
+    BasePromptTemplate,
 )
-from langchain.prompts.prompt import PromptTemplate
-from langchain.base_language import BaseLanguageModel, BaseMessage
 
 from llama_index.llm_predictor.base import LLMPredictor
 from llama_index.prompts.base import Prompt
diff --git a/llama_index/output_parsers/guardrails.py b/llama_index/output_parsers/guardrails.py
index 94d0f9d29129d475c6a8b6aa37f8221ae7340324..131a43c632a217670861915de7b6d1608cb27f2d 100644
--- a/llama_index/output_parsers/guardrails.py
+++ b/llama_index/output_parsers/guardrails.py
@@ -12,7 +12,7 @@ except ImportError:
 from copy import deepcopy
 from typing import Any, Callable, Optional
 
-from langchain.llms.base import BaseLLM
+from llama_index.bridge.langchain import BaseLLM
 
 from llama_index.output_parsers.base import BaseOutputParser
 
diff --git a/llama_index/output_parsers/langchain.py b/llama_index/output_parsers/langchain.py
index c63414bde9a1fe0280d2a72a35e9f4a858868121..e10762633e8be88bd70a1f5879a55265377c9181 100644
--- a/llama_index/output_parsers/langchain.py
+++ b/llama_index/output_parsers/langchain.py
@@ -3,7 +3,7 @@
 from string import Formatter
 from typing import Any, Optional
 
-from langchain.schema import BaseOutputParser as LCOutputParser
+from llama_index.bridge.langchain import BaseOutputParser as LCOutputParser
 
 from llama_index.output_parsers.base import BaseOutputParser
 
diff --git a/llama_index/playground/base.py b/llama_index/playground/base.py
index 8ead94d5c4649de0047cfe68541c7477a8224b4a..acd317230d24f6170df8859161faa0db78b9cdb2 100644
--- a/llama_index/playground/base.py
+++ b/llama_index/playground/base.py
@@ -5,7 +5,7 @@ import time
 from typing import Any, Dict, List, Optional, Type, Union
 
 import pandas as pd
-from langchain.input import get_color_mapping, print_text
+from llama_index.bridge.langchain import get_color_mapping, print_text
 
 from llama_index.indices.base import BaseIndex
 from llama_index.indices.list.base import ListIndex, ListRetrieverMode
diff --git a/llama_index/program/openai_program.py b/llama_index/program/openai_program.py
index da804e419e52c7aef78f74ac6683f39da3567c35..6ca5a02f80d286a998b2b70b40c594c2fab9c36b 100644
--- a/llama_index/program/openai_program.py
+++ b/llama_index/program/openai_program.py
@@ -1,7 +1,6 @@
 from typing import Any, Dict, Generic, Optional, Type, Union
 
-from langchain.chat_models import ChatOpenAI
-from langchain.schema import HumanMessage
+from llama_index.bridge.langchain import ChatOpenAI, HumanMessage
 
 from llama_index.program.base_program import BasePydanticProgram, Model
 from llama_index.prompts.base import Prompt
diff --git a/llama_index/prompts/base.py b/llama_index/prompts/base.py
index 8dd649e4bc85d4659a80bd06a2c9e1ea7bf16d96..77383a730d896fcd16ac3bf5ca0b1b781e66e93f 100644
--- a/llama_index/prompts/base.py
+++ b/llama_index/prompts/base.py
@@ -2,10 +2,9 @@
 from copy import deepcopy
 from typing import Any, Dict, Optional
 
-from langchain import BasePromptTemplate as BaseLangchainPrompt
-from langchain import PromptTemplate as LangchainPrompt
-from langchain.base_language import BaseLanguageModel
-from langchain.chains.prompt_selector import ConditionalPromptSelector
+from llama_index.bridge.langchain import BasePromptTemplate as BaseLangchainPrompt
+from llama_index.bridge.langchain import PromptTemplate as LangchainPrompt
+from llama_index.bridge.langchain import BaseLanguageModel, ConditionalPromptSelector
 
 from llama_index.output_parsers.base import BaseOutputParser
 from llama_index.prompts.prompt_type import PromptType
diff --git a/llama_index/prompts/chat_prompts.py b/llama_index/prompts/chat_prompts.py
index 8986850679e97dbaf5551ec841289b6a7df4b56e..2d02c83fead094291704fa01d11b14aca2f3b07a 100644
--- a/llama_index/prompts/chat_prompts.py
+++ b/llama_index/prompts/chat_prompts.py
@@ -1,6 +1,6 @@
 """Prompts for ChatGPT."""
 
-from langchain.prompts.chat import (
+from llama_index.bridge.langchain import (
     AIMessagePromptTemplate,
     ChatPromptTemplate,
     HumanMessagePromptTemplate,
diff --git a/llama_index/prompts/default_prompt_selectors.py b/llama_index/prompts/default_prompt_selectors.py
index 9f8d3d1a9050a5bf562c8bcd76bc5cf574e066ba..8aef6c153cb90c8e64fec2997978a3a2d8b74842 100644
--- a/llama_index/prompts/default_prompt_selectors.py
+++ b/llama_index/prompts/default_prompt_selectors.py
@@ -1,5 +1,5 @@
 """Prompt selectors."""
-from langchain.chains.prompt_selector import ConditionalPromptSelector, is_chat_model
+from llama_index.bridge.langchain import ConditionalPromptSelector, is_chat_model
 
 from llama_index.prompts.chat_prompts import (
     CHAT_REFINE_PROMPT,
diff --git a/llama_index/query_engine/flare/base.py b/llama_index/query_engine/flare/base.py
index 27f8054f415b8d05dd90cbf9b61063df998aebf6..6b3858d142209d04fcd8f50a417e8b1b70aff46d 100644
--- a/llama_index/query_engine/flare/base.py
+++ b/llama_index/query_engine/flare/base.py
@@ -4,7 +4,7 @@ Active Retrieval Augmented Generation.
 
 """
 
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 from typing import Optional
 from llama_index.indices.query.base import BaseQueryEngine
 from llama_index.indices.service_context import ServiceContext
diff --git a/llama_index/query_engine/pandas_query_engine.py b/llama_index/query_engine/pandas_query_engine.py
index cc6466d16ba35bfe4902df5cabf276f6f5e6430a..580f1b753877e33d4210f389aece31802d02f872 100644
--- a/llama_index/query_engine/pandas_query_engine.py
+++ b/llama_index/query_engine/pandas_query_engine.py
@@ -4,7 +4,7 @@ import logging
 from typing import Any, Callable, Optional
 
 import pandas as pd
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 
 from llama_index.indices.query.base import BaseQueryEngine
 from llama_index.indices.query.schema import QueryBundle
diff --git a/llama_index/query_engine/sql_join_query_engine.py b/llama_index/query_engine/sql_join_query_engine.py
index 57e513a766f569a89e34fc7c447825471baea2dc..da823c691729cd27da0598e8d3e7f53d1332d7b0 100644
--- a/llama_index/query_engine/sql_join_query_engine.py
+++ b/llama_index/query_engine/sql_join_query_engine.py
@@ -1,6 +1,6 @@
 """SQL Join query engine."""
 
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 from typing import Optional, cast, Dict, Callable
 from llama_index.indices.query.base import BaseQueryEngine
 from llama_index.indices.struct_store.sql_query import NLStructStoreQueryEngine
diff --git a/llama_index/query_engine/sub_question_query_engine.py b/llama_index/query_engine/sub_question_query_engine.py
index 9e94c2c583b3ba87a385975efdd7aa3df74854b2..37adc3a21b09187249c7637914940e3f5eaaa40c 100644
--- a/llama_index/query_engine/sub_question_query_engine.py
+++ b/llama_index/query_engine/sub_question_query_engine.py
@@ -2,7 +2,7 @@ import asyncio
 import logging
 from typing import List, Optional, Sequence, cast
 
-from langchain.input import get_color_mapping, print_text
+from llama_index.bridge.langchain import get_color_mapping, print_text
 
 from llama_index.async_utils import run_async_tasks
 from llama_index.callbacks.base import CallbackManager
diff --git a/llama_index/readers/base.py b/llama_index/readers/base.py
index a7056e69b348339415dc47357e963019c2bc1ef4..68228222855640a14396e382290a1a9e95eafea6 100644
--- a/llama_index/readers/base.py
+++ b/llama_index/readers/base.py
@@ -2,7 +2,7 @@
 from abc import abstractmethod
 from typing import Any, List
 
-from langchain.docstore.document import Document as LCDocument
+from llama_index.bridge.langchain import Document as LCDocument
 
 from llama_index.readers.schema.base import Document
 
diff --git a/llama_index/readers/obsidian.py b/llama_index/readers/obsidian.py
index 3f4cc3c4f9801644e0189606e98fe78400af5eb3..7503335f35800a257b54c6a025bf61693fd55b7b 100644
--- a/llama_index/readers/obsidian.py
+++ b/llama_index/readers/obsidian.py
@@ -9,7 +9,7 @@ import os
 from pathlib import Path
 from typing import Any, List
 
-from langchain.docstore.document import Document as LCDocument
+from llama_index.bridge.langchain import Document as LCDocument
 
 from llama_index.readers.base import BaseReader
 from llama_index.readers.file.markdown_reader import MarkdownReader
diff --git a/llama_index/readers/schema/base.py b/llama_index/readers/schema/base.py
index 64445701817c4cbf244b0b6337e57823d43e82e8..0c8d12c0a47068871294da551bbde0bbefcf9d28 100644
--- a/llama_index/readers/schema/base.py
+++ b/llama_index/readers/schema/base.py
@@ -2,7 +2,7 @@
 from dataclasses import dataclass
 from typing import Optional
 
-from langchain.docstore.document import Document as LCDocument
+from llama_index.bridge.langchain import Document as LCDocument
 
 from llama_index.schema import BaseDocument
 
diff --git a/llama_index/selectors/pydantic_selectors.py b/llama_index/selectors/pydantic_selectors.py
index 34de757b20eac9dfec908b21b114b99a88a1d187..6172f9db8fddd66f79a1e042d1e57e2f643af18a 100644
--- a/llama_index/selectors/pydantic_selectors.py
+++ b/llama_index/selectors/pydantic_selectors.py
@@ -1,4 +1,4 @@
-from langchain.chat_models import ChatOpenAI
+from llama_index.bridge.langchain import ChatOpenAI
 from typing import Any, Optional, Sequence
 
 from llama_index.indices.query.schema import QueryBundle
diff --git a/llama_index/token_counter/mock_chain_wrapper.py b/llama_index/token_counter/mock_chain_wrapper.py
index 82b6ed958ddf38f9efbf320c7c830243465f0053..b7642702a10dedbd19792c399fc4422d7f5d65dc 100644
--- a/llama_index/token_counter/mock_chain_wrapper.py
+++ b/llama_index/token_counter/mock_chain_wrapper.py
@@ -2,7 +2,7 @@
 
 from typing import Any, Dict, Optional
 
-from langchain.llms.base import BaseLLM
+from llama_index.bridge.langchain import BaseLLM
 
 from llama_index.constants import DEFAULT_NUM_OUTPUTS
 from llama_index.langchain_helpers.chain_wrapper import LLMPredictor
diff --git a/llama_index/tools/function_tool.py b/llama_index/tools/function_tool.py
index 0e51f1d802ea3d6b623ccf3c223f4dd13d75d74b..698f53a781af97d7c126b6ebb25d9c99d3431783 100644
--- a/llama_index/tools/function_tool.py
+++ b/llama_index/tools/function_tool.py
@@ -2,7 +2,7 @@ from typing import Any, Optional, Callable, Type
 
 from pydantic import BaseModel
 from llama_index.tools.types import BaseTool, ToolMetadata
-from langchain.tools import Tool, StructuredTool
+from llama_index.bridge.langchain import Tool, StructuredTool
 from inspect import signature
 from llama_index.tools.utils import create_schema_from_function
 
diff --git a/llama_index/tools/query_plan.py b/llama_index/tools/query_plan.py
index ddc9c60bd5136ae284c807179347344dbab9e0c1..92d142050701ec96fe70b93951042c701f8e46d6 100644
--- a/llama_index/tools/query_plan.py
+++ b/llama_index/tools/query_plan.py
@@ -8,7 +8,7 @@ from llama_index.data_structs.node import NodeWithScore, Node
 from typing import Dict, List, Any, Optional
 from pydantic import BaseModel, Field
 from llama_index.indices.query.schema import QueryBundle
-from langchain.input import print_text
+from llama_index.bridge.langchain import print_text
 
 
 DEFAULT_NAME = "query_plan_tool"
diff --git a/llama_index/tools/types.py b/llama_index/tools/types.py
index 0064f1cd013997d66d3377093e1efdad4905416f..db20598aedfbf2fac147fd3833fa6d11b1b4a00d 100644
--- a/llama_index/tools/types.py
+++ b/llama_index/tools/types.py
@@ -2,7 +2,7 @@ from abc import abstractmethod
 from dataclasses import dataclass
 from typing import Any, Dict, Optional, Type
 
-from langchain.tools import StructuredTool, Tool
+from llama_index.bridge.langchain import StructuredTool, Tool
 from pydantic import BaseModel
 
 
diff --git a/setup.py b/setup.py
index 9b9bd1588c0578b88e6f0106e2686aa2f78a8938..4cc64f5516b784ed23c8775094e830835edae719 100644
--- a/setup.py
+++ b/setup.py
@@ -25,6 +25,7 @@ install_requires = [
     "fsspec>=2023.5.0",
     "typing-inspect==0.8.0",
     "typing_extensions==4.5.0",
+    "bs4",  # hotfix for langchain 0.0.212 bug
 ]
 
 # NOTE: if python version >= 3.9, install tiktoken
diff --git a/tests/indices/test_prompt_helper.py b/tests/indices/test_prompt_helper.py
index 92da0662af6bc17c4ef57ed805895e7fdfb83f75..4e6c08bff2b9ca13ca1b4595161f5c23e644644b 100644
--- a/tests/indices/test_prompt_helper.py
+++ b/tests/indices/test_prompt_helper.py
@@ -1,7 +1,7 @@
 """Test PromptHelper."""
 from typing import cast
 
-from langchain import PromptTemplate as LangchainPrompt
+from llama_index.bridge.langchain import PromptTemplate as LangchainPrompt
 
 from llama_index.data_structs.node import Node
 from llama_index.indices.prompt_helper import PromptHelper
diff --git a/tests/llm_predictor/test_base.py b/tests/llm_predictor/test_base.py
index 0f5166364f55b7a8d50e735ace253abd40679456..fa0935d76e3bc7dda4a262fc59b55eb6a5097036 100644
--- a/tests/llm_predictor/test_base.py
+++ b/tests/llm_predictor/test_base.py
@@ -4,7 +4,7 @@ from typing import Any, Tuple
 from unittest.mock import patch
 
 import pytest
-from langchain.llms.fake import FakeListLLM
+from llama_index.bridge.langchain import FakeListLLM
 
 from llama_index.llm_predictor.structured import LLMPredictor, StructuredLLMPredictor
 from llama_index.output_parsers.base import BaseOutputParser
@@ -61,7 +61,7 @@ def test_struct_llm_predictor_with_cache() -> None:
     """Test LLM predictor."""
     from gptcache.processor.pre import get_prompt
     from gptcache.manager.factory import get_data_manager
-    from langchain.cache import GPTCache
+    from llama_index.bridge.langchain import GPTCache
 
     def init_gptcache_map(cache_obj: Cache) -> None:
         cache_path = "test"
diff --git a/tests/output_parsers/test_base.py b/tests/output_parsers/test_base.py
index b07f5aecc274c36d524fbda958bc26c07dd6990b..18e53dd74137aa95994dc6782bde193a2aecf61e 100644
--- a/tests/output_parsers/test_base.py
+++ b/tests/output_parsers/test_base.py
@@ -1,8 +1,10 @@
 """Test Output parsers."""
 
 
-from langchain.output_parsers import ResponseSchema
-from langchain.schema import BaseOutputParser as LCOutputParser
+from llama_index.bridge.langchain import (
+    ResponseSchema,
+    BaseOutputParser as LCOutputParser,
+)
 
 from llama_index.output_parsers.langchain import LangchainOutputParser
 
diff --git a/tests/prompts/test_base.py b/tests/prompts/test_base.py
index 1adcbef31ad770bc28141c96172e0b9b3cc234b2..ec722e62877c0d6af6b850fdf8dfe291f5a6f5d4 100644
--- a/tests/prompts/test_base.py
+++ b/tests/prompts/test_base.py
@@ -3,10 +3,12 @@
 from unittest.mock import MagicMock
 
 import pytest
-from langchain import PromptTemplate
-from langchain.chains.prompt_selector import ConditionalPromptSelector
-from langchain.chat_models.base import BaseChatModel
-from langchain.chat_models.openai import ChatOpenAI
+from llama_index.bridge.langchain import (
+    PromptTemplate,
+    ConditionalPromptSelector,
+    BaseChatModel,
+    ChatOpenAI,
+)
 
 from llama_index.prompts.base import Prompt
 
diff --git a/tests/token_predictor/test_base.py b/tests/token_predictor/test_base.py
index 252a8c5f88c153a46abbcf555d34eb10183a969e..d3893975c3258c587751f6b63a0196bb2e615b4b 100644
--- a/tests/token_predictor/test_base.py
+++ b/tests/token_predictor/test_base.py
@@ -3,7 +3,7 @@
 from typing import Any
 from unittest.mock import MagicMock, patch
 
-from langchain.llms.base import BaseLLM
+from llama_index.bridge.langchain import BaseLLM
 
 from llama_index.indices.keyword_table.base import KeywordTableIndex
 from llama_index.indices.list.base import ListIndex
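
Taken together, the patch routes every langchain import through the new llama_index.bridge.langchain module, so the rest of the codebase depends on a single re-export point instead of langchain's internal module layout. Below is a minimal sketch of what a call site looks like after this refactor; it assumes langchain is installed and an OpenAI API key is configured, and the ask helper is hypothetical, not part of the diff.

# Hypothetical consumer module -- a sketch only, not part of the commit above.
# Every symbol comes from the re-export shim added in llama_index/bridge/langchain.py,
# so this file never imports langchain directly.
from llama_index.bridge.langchain import ChatOpenAI, HumanMessage, print_text


def ask(question: str) -> str:
    """Send one human message to a chat model and print the reply in color."""
    llm = ChatOpenAI(temperature=0)                # chat model wrapper re-exported via the bridge
    reply = llm([HumanMessage(content=question)])  # calling a chat model with messages returns an AIMessage
    print_text(reply.content, color="green", end="\n")  # langchain's console helper, also re-exported
    return reply.content

The point of the indirection is maintenance: when langchain moves or renames a symbol, only the bridge module needs to change, while the many call sites touched in this diff keep the same import line.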