diff --git a/semantic_router/encoders/base.py b/semantic_router/encoders/base.py
index f5968578ead0d01a269876f948e259910a6116fb..edc98641147668705150a0ee1242e77eeeebb431 100644
--- a/semantic_router/encoders/base.py
+++ b/semantic_router/encoders/base.py
@@ -1,3 +1,4 @@
+from typing import List
 from pydantic import BaseModel, Field
 
 
@@ -9,5 +10,5 @@ class BaseEncoder(BaseModel):
     class Config:
         arbitrary_types_allowed = True
 
-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
         raise NotImplementedError("Subclasses must implement this method")
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index 451273cdcc78899861c7b64baea4eb4e1cc6b33b..83cbccc06fe453203cd729e6ab2f56c4237a0f74 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional
+from typing import Any, Optional, List, Dict
 
 from semantic_router.encoders import BaseEncoder
 from semantic_router.utils.logger import logger
@@ -6,7 +6,7 @@ from semantic_router.utils.logger import logger
 
 class BM25Encoder(BaseEncoder):
     model: Optional[Any] = None
-    idx_mapping: Optional[dict[int, int]] = None
+    idx_mapping: Optional[Dict[int, int]] = None
     type: str = "sparse"
 
     def __init__(
@@ -40,7 +40,7 @@ class BM25Encoder(BaseEncoder):
         else:
             raise TypeError("Expected a dictionary for 'doc_freq'")
 
-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.model is None or self.idx_mapping is None:
             raise ValueError("Model or index mapping is not initialized.")
         if len(docs) == 1:
@@ -60,7 +60,7 @@ class BM25Encoder(BaseEncoder):
                     embeds[i][position] = val
         return embeds
 
-    def fit(self, docs: list[str]):
+    def fit(self, docs: List[str]):
         if self.model is None:
             raise ValueError("Model is not initialized.")
         self.model.fit(docs)
diff --git a/semantic_router/encoders/cohere.py b/semantic_router/encoders/cohere.py
index ec8ee0f8fcebd39444f3689d7bdffc2b7e98c812..803fe779f82b54460040d5ba57b82aff1bcb1f13 100644
--- a/semantic_router/encoders/cohere.py
+++ b/semantic_router/encoders/cohere.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional
+from typing import Optional, List
 
 import cohere
 
@@ -27,7 +27,7 @@ class CohereEncoder(BaseEncoder):
         except Exception as e:
             raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
 
-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
             raise ValueError("Cohere client is not initialized.")
         try:
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index 33c81f397263375fb58d851656a0a8d5ae43ea8b..ec356317671fc93848e0f3977985cec1a221d827 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional
+from typing import Any, Optional, List
 
 import numpy as np
 from pydantic import PrivateAttr
@@ -42,10 +42,10 @@ class FastEmbedEncoder(BaseEncoder):
         embedding = Embedding(**embedding_args)
         return embedding
 
-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
         try:
-            embeds: list[np.ndarray] = list(self._client.embed(docs))
-            embeddings: list[list[float]] = [e.tolist() for e in embeds]
+            embeds: List[np.ndarray] = list(self._client.embed(docs))
+            embeddings: List[List[float]] = [e.tolist() for e in embeds]
             return embeddings
         except Exception as e:
             raise ValueError(f"FastEmbed embed failed. Error: {e}")
diff --git a/semantic_router/encoders/huggingface.py b/semantic_router/encoders/huggingface.py
index ace189213b76aed940dd8b4280ce1505339f656f..2166ea13f68cb263d76fabb96b310501d58169fb 100644
--- a/semantic_router/encoders/huggingface.py
+++ b/semantic_router/encoders/huggingface.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional
+from typing import Any, Optional, List
 
 from pydantic import PrivateAttr
 
@@ -60,11 +60,11 @@ class HuggingFaceEncoder(BaseEncoder):
 
     def __call__(
         self,
-        docs: list[str],
+        docs: List[str],
         batch_size: int = 32,
         normalize_embeddings: bool = True,
         pooling_strategy: str = "mean",
-    ) -> list[list[float]]:
+    ) -> List[List[float]]:
         all_embeddings = []
         for i in range(0, len(docs), batch_size):
             batch_docs = docs[i : i + batch_size]
diff --git a/semantic_router/encoders/openai.py b/semantic_router/encoders/openai.py
index 169761afa8f726a72439a534a69bac3ebf73de29..3b06d33de2a4ad01da3ad950feddf15731d332c8 100644
--- a/semantic_router/encoders/openai.py
+++ b/semantic_router/encoders/openai.py
@@ -1,6 +1,6 @@
 import os
 from time import sleep
-from typing import Optional
+from typing import Optional, List
 
 import openai
 from openai import OpenAIError
@@ -31,7 +31,7 @@ class OpenAIEncoder(BaseEncoder):
         except Exception as e:
             raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
 
-    def __call__(self, docs: list[str]) -> list[list[float]]:
+    def __call__(self, docs: List[str]) -> List[List[float]]:
         if self.client is None:
             raise ValueError("OpenAI client is not initialized.")
         embeds = None
diff --git a/semantic_router/hybrid_layer.py b/semantic_router/hybrid_layer.py
index d4c81b13c87749f070a2177896ed6c702484fde0..ad8d6ec23e06b372432d2c1e374f48d045fa884c 100644
--- a/semantic_router/hybrid_layer.py
+++ b/semantic_router/hybrid_layer.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Optional, List, Dict, Tuple
 
 import numpy as np
 from numpy.linalg import norm
@@ -21,7 +21,7 @@ class HybridRouteLayer:
         self,
         encoder: BaseEncoder,
         sparse_encoder: Optional[BM25Encoder] = None,
-        routes: list[Route] = [],
+        routes: List[Route] = [],
         alpha: float = 0.3,
     ):
         self.encoder = encoder
@@ -81,7 +81,7 @@ class HybridRouteLayer:
         else:
             self.sparse_index = np.concatenate([self.sparse_index, sparse_embeds])
 
-    def _add_routes(self, routes: list[Route]):
+    def _add_routes(self, routes: List[Route]):
         # create embeddings for all routes
         logger.info("Creating embeddings for all routes...")
         all_utterances = [
@@ -153,8 +153,8 @@ class HybridRouteLayer:
         sparse = np.array(sparse) * (1 - self.alpha)
         return dense, sparse
 
-    def _semantic_classify(self, query_results: list[dict]) -> tuple[str, list[float]]:
-        scores_by_class: dict[str, list[float]] = {}
+    def _semantic_classify(self, query_results: List[Dict]) -> Tuple[str, List[float]]:
+        scores_by_class: Dict[str, List[float]] = {}
         for result in query_results:
             score = result["score"]
             route = result["route"]
@@ -174,7 +174,7 @@ class HybridRouteLayer:
             logger.warning("No classification found for semantic classifier.")
             return "", []
 
-    def _pass_threshold(self, scores: list[float], threshold: float) -> bool:
+    def _pass_threshold(self, scores: List[float], threshold: float) -> bool:
         if scores:
             return max(scores) > threshold
         else:
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 7ff7a15b22a367d92ad475e5dff70f4458ebf3a0..bce160ba7853e5f70d30ea9a223ee3d44630c40f 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -1,6 +1,6 @@
 import json
 import os
-from typing import Optional, Any
+from typing import Optional, Any, List, Dict, Tuple
 
 import numpy as np
 import yaml
@@ -48,11 +48,11 @@ class LayerConfig:
     RouteLayer.
     """
 
-    routes: list[Route] = []
+    routes: List[Route] = []
 
     def __init__(
         self,
-        routes: list[Route] = [],
+        routes: List[Route] = [],
         encoder_type: str = "openai",
         encoder_name: Optional[str] = None,
     ):
@@ -99,7 +99,7 @@ class LayerConfig:
             else:
                 raise Exception("Invalid config JSON or YAML")
 
-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> Dict[str, Any]:
         return {
             "encoder_type": self.encoder_type,
             "encoder_name": self.encoder_name,
@@ -158,7 +158,7 @@ class RouteLayer:
         self,
         encoder: Optional[BaseEncoder] = None,
         llm: Optional[BaseLLM] = None,
-        routes: Optional[list[Route]] = None,
+        routes: Optional[List[Route]] = None,
         top_k_routes: int = 3,
     ):
         logger.info("Initializing RouteLayer")
@@ -247,7 +247,7 @@ class RouteLayer:
         # add route to routes list
         self.routes.append(route)
 
-    def _add_routes(self, routes: list[Route]):
+    def _add_routes(self, routes: List[Route]):
         # create embeddings for all routes
         all_utterances = [
             utterance for route in routes for utterance in route.utterances
@@ -290,8 +290,8 @@ class RouteLayer:
             logger.warning("No index found for route layer.")
             return []
 
-    def _semantic_classify(self, query_results: list[dict]) -> tuple[str, list[float]]:
-        scores_by_class: dict[str, list[float]] = {}
+    def _semantic_classify(self, query_results: List[Dict]) -> Tuple[str, List[float]]:
+        scores_by_class: Dict[str, List[float]] = {}
         for result in query_results:
             score = result["score"]
             route = result["route"]
@@ -311,7 +311,7 @@ class RouteLayer:
             logger.warning("No classification found for semantic classifier.")
             return "", []
 
-    def _pass_threshold(self, scores: list[float], threshold: float) -> bool:
+    def _pass_threshold(self, scores: List[float], threshold: float) -> bool:
         if scores:
             return max(scores) > threshold
         else:
diff --git a/semantic_router/linear.py b/semantic_router/linear.py
index 09b911fbc62245f17e564d6e368ddf627d1b3864..1c13262fbe55bfe4cd92f75c61fb33899d60337e 100644
--- a/semantic_router/linear.py
+++ b/semantic_router/linear.py
@@ -1,3 +1,5 @@
+from typing import Tuple
+
 import numpy as np
 from numpy.linalg import norm
 
@@ -19,7 +21,7 @@ def similarity_matrix(xq: np.ndarray, index: np.ndarray) -> np.ndarray:
     return sim
 
 
-def top_scores(sim: np.ndarray, top_k: int = 5) -> tuple[np.ndarray, np.ndarray]:
+def top_scores(sim: np.ndarray, top_k: int = 5) -> Tuple[np.ndarray, np.ndarray]:
     # get indices of top_k records
     top_k = min(top_k, sim.shape[0])
     idx = np.argpartition(sim, -top_k)[-top_k:]
diff --git a/semantic_router/llms/base.py b/semantic_router/llms/base.py
index bf5f29b6005daaa76abc4674971dc8f775f4af80..12d89f2d31e1cd181346322daf01d0b206222a20 100644
--- a/semantic_router/llms/base.py
+++ b/semantic_router/llms/base.py
@@ -1,4 +1,4 @@
-from typing import Optional
+from typing import Optional, List
 
 from pydantic import BaseModel
 
@@ -11,5 +11,5 @@ class BaseLLM(BaseModel):
     class Config:
         arbitrary_types_allowed = True
 
-    def __call__(self, messages: list[Message]) -> Optional[str]:
+    def __call__(self, messages: List[Message]) -> Optional[str]:
         raise NotImplementedError("Subclasses must implement this method")
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
index 0ec21f354c090f0d1d00da7c20b89c5b233c3a89..0eebbe6d6e8385e66ed9df42b941a915fa144e22 100644
--- a/semantic_router/llms/cohere.py
+++ b/semantic_router/llms/cohere.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional
+from typing import Optional, List
 
 import cohere
 
@@ -26,7 +26,7 @@ class CohereLLM(BaseLLM):
         except Exception as e:
             raise ValueError(f"Cohere API client failed to initialize. Error: {e}")
 
-    def __call__(self, messages: list[Message]) -> str:
+    def __call__(self, messages: List[Message]) -> str:
         if self.client is None:
             raise ValueError("Cohere client is not initialized.")
         try:
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 8b3442c742c2f268773bf551fb49ce0cd24645af..06d6865ca1ec095d04453aaf8deb7c8e8d5ef54e 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional
+from typing import Optional, List
 
 import openai
 
@@ -33,7 +33,7 @@ class OpenAILLM(BaseLLM):
         self.temperature = temperature
         self.max_tokens = max_tokens
 
-    def __call__(self, messages: list[Message]) -> str:
+    def __call__(self, messages: List[Message]) -> str:
         if self.client is None:
             raise ValueError("OpenAI client is not initialized.")
         try:
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 4cc15d6bfedbfa67fb5957129d1ce901544dcb38..8c3efb8d1f67fc246f62116555368eafa1f36288 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional
+from typing import Optional, List
 
 import openai
 
@@ -38,7 +38,7 @@ class OpenRouterLLM(BaseLLM):
         self.temperature = temperature
         self.max_tokens = max_tokens
 
-    def __call__(self, messages: list[Message]) -> str:
+    def __call__(self, messages: List[Message]) -> str:
         if self.client is None:
             raise ValueError("OpenRouter client is not initialized.")
         try:
diff --git a/semantic_router/route.py b/semantic_router/route.py
index b492ae1346aa959e30f1d73effb2fbdc24f64b4b..cc7dc17ab1f160484831c6474627eae2c372fcf1 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -1,6 +1,6 @@
 import json
 import re
-from typing import Any, Callable, Optional, Union
+from typing import Any, Callable, Optional, Union, List, Dict
 
 from pydantic import BaseModel
 
@@ -40,9 +40,9 @@ def is_valid(route_config: str) -> bool:
 
 class Route(BaseModel):
     name: str
-    utterances: list[str]
+    utterances: List[str]
     description: Optional[str] = None
-    function_schema: Optional[dict[str, Any]] = None
+    function_schema: Optional[Dict[str, Any]] = None
     llm: Optional[BaseLLM] = None
 
     def __call__(self, query: str) -> RouteChoice:
@@ -62,11 +62,11 @@ class Route(BaseModel):
             func_call = None
         return RouteChoice(name=self.name, function_call=func_call)
 
-    def to_dict(self) -> dict[str, Any]:
+    def to_dict(self) -> Dict[str, Any]:
         return self.dict()
 
     @classmethod
-    def from_dict(cls, data: dict[str, Any]):
+    def from_dict(cls, data: Dict[str, Any]):
         return cls(**data)
 
     @classmethod
@@ -92,7 +92,7 @@ class Route(BaseModel):
             raise ValueError("No <config></config> tags found in the output.")
 
     @classmethod
-    def _generate_dynamic_route(cls, llm: BaseLLM, function_schema: dict[str, Any]):
+    def _generate_dynamic_route(cls, llm: BaseLLM, function_schema: Dict[str, Any]):
         logger.info("Generating dynamic route...")
 
         prompt = f"""
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index c7912fa1c888f79ebf0b3c32af1836f6948698f4..7dcb7fde1252088ab7736510c7f04fa32c3a6f6d 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Optional, Literal
+from typing import Optional, Literal, List, Dict
 
 from pydantic import BaseModel
 from pydantic.dataclasses import dataclass
@@ -47,7 +47,7 @@ class Encoder:
         else:
             raise ValueError
 
-    def __call__(self, texts: list[str]) -> list[list[float]]:
+    def __call__(self, texts: List[str]) -> List[List[float]]:
         return self.model(texts)
 
 
@@ -65,7 +65,7 @@ class Message(BaseModel):
 
 
 class Conversation(BaseModel):
-    messages: list[Message]
+    messages: List[Message]
 
     def split_by_topic(
         self,
@@ -74,7 +74,7 @@ class Conversation(BaseModel):
         split_method: Literal[
             "consecutive_similarity_drop", "cumulative_similarity_drop"
         ] = "consecutive_similarity_drop",
-    ) -> dict[str, list[str]]:
+    ) -> Dict[str, List[str]]:
         docs = [f"{m.role}: {m.content}" for m in self.messages]
         return semantic_splitter(
             encoder=encoder, docs=docs, threshold=threshold, split_method=split_method
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index cedd9b6ecd86131b630cf6d4921848604dc88fa0..ad09970f072f2f2d76594d58d2e31c6f93273f62 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -1,6 +1,6 @@
 import inspect
 import json
-from typing import Any, Callable, Union
+from typing import Any, Callable, Union, Dict, List
 
 from pydantic import BaseModel
 
@@ -9,7 +9,7 @@ from semantic_router.schema import Message, RouteChoice
 from semantic_router.utils.logger import logger
 
 
-def get_schema(item: Union[BaseModel, Callable]) -> dict[str, Any]:
+def get_schema(item: Union[BaseModel, Callable]) -> Dict[str, Any]:
     if isinstance(item, BaseModel):
         signature_parts = []
         for field_name, field_model in item.__annotations__.items():
@@ -42,8 +42,8 @@ def get_schema(item: Union[BaseModel, Callable]) -> dict[str, Any]:
 
 
 def extract_function_inputs(
-    query: str, llm: BaseLLM, function_schema: dict[str, Any]
-) -> dict:
+    query: str, llm: BaseLLM, function_schema: Dict[str, Any]
+) -> Dict[str, Any]:
     logger.info("Extracting function input...")
 
     prompt = f"""
@@ -87,7 +87,7 @@ def extract_function_inputs(
     return function_inputs
 
 
-def is_valid_inputs(inputs: dict[str, Any], function_schema: dict[str, Any]) -> bool:
+def is_valid_inputs(inputs: Dict[str, Any], function_schema: Dict[str, Any]) -> bool:
     """Validate the extracted inputs against the function schema"""
     try:
         # Extract parameter names and types from the signature string
@@ -110,7 +110,7 @@ def is_valid_inputs(inputs: dict[str, Any], function_schema: dict[str, Any]) ->
 
 # TODO: Add route layer object to the input, solve circular import issue
 async def route_and_execute(
-    query: str, llm: BaseLLM, functions: list[Callable], layer
+    query: str, llm: BaseLLM, functions: List[Callable], layer
 ) -> Any:
     route_choice: RouteChoice = layer(query)
 
diff --git a/semantic_router/utils/splitters.py b/semantic_router/utils/splitters.py
index 2016031955b8173b66a8c9e246555b295f555bb6..83a32839c5efc3b528f9a14643c3f3db3571f3e3 100644
--- a/semantic_router/utils/splitters.py
+++ b/semantic_router/utils/splitters.py
@@ -1,17 +1,17 @@
 import numpy as np
-from typing import Literal
+from typing import List, Dict, Literal
 
 from semantic_router.encoders import BaseEncoder
 
 
 def semantic_splitter(
     encoder: BaseEncoder,
-    docs: list[str],
+    docs: List[str],
     threshold: float,
     split_method: Literal[
         "consecutive_similarity_drop", "cumulative_similarity_drop"
     ] = "consecutive_similarity_drop",
-) -> dict[str, list[str]]:
+) -> Dict[str, List[str]]:
     """
     Splits a list of documents base on semantic similarity changes.
 
@@ -23,13 +23,13 @@ def semantic_splitter(
 
     Args:
         encoder (BaseEncoder): Encoder for document embeddings.
-        docs (list[str]): Documents to split.
+        docs (List[str]): Documents to split.
         threshold (float): The similarity drop value that will trigger a new document
         split.
         split_method (str): The method to use for splitting.
 
     Returns:
-        dict[str, list[str]]: Splits with corresponding documents.
+        Dict[str, List[str]]: Splits with corresponding documents.
     """
     total_docs = len(docs)
     splits = {}
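
A minimal sketch (not part of the patch) of why the typing.List / typing.Dict
spellings are used throughout, assuming the goal is to keep the package
importable on Python 3.8: PEP 585 built-in generics such as list[str] only
became subscriptable at runtime in Python 3.9, and both function annotations
and pydantic field annotations are evaluated eagerly unless
`from __future__ import annotations` is in effect.

# Hypothetical example, not taken from the repository.
from typing import List

from pydantic import BaseModel


class Works(BaseModel):
    docs: List[str]  # valid on Python 3.8 and later


# On Python 3.8 the class below fails at definition time with
# "TypeError: 'type' object is not subscriptable":
#
# class Breaks(BaseModel):
#     docs: list[str]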