diff --git a/semantic_router/encoders/__init__.py b/semantic_router/encoders/__init__.py
index 893d1d61867721bc8602ac8522294ba52078f432..85b32e2cb56ae55e85fde68da8f83a5e0afa5147 100644
--- a/semantic_router/encoders/__init__.py
+++ b/semantic_router/encoders/__init__.py
@@ -1,7 +1,7 @@
-from typing import List, Optional
+from typing import List, Optional, Union
 
 from semantic_router.encoders.aurelio import AurelioSparseEncoder
-from semantic_router.encoders.base import BaseEncoder
+from semantic_router.encoders.base import DenseEncoder, SparseEncoder
 from semantic_router.encoders.bedrock import BedrockEncoder
 from semantic_router.encoders.bm25 import BM25Encoder
 from semantic_router.encoders.clip import CLIPEncoder
@@ -19,7 +19,8 @@ from semantic_router.schema import EncoderType
 
 __all__ = [
     "AurelioSparseEncoder",
-    "BaseEncoder",
+    "DenseEncoder",
+    "SparseEncoder",
     "AzureOpenAIEncoder",
     "CohereEncoder",
     "OpenAIEncoder",
@@ -39,7 +40,7 @@ __all__ = [
 class AutoEncoder:
     type: EncoderType
     name: Optional[str]
-    model: BaseEncoder
+    model: Union[DenseEncoder, SparseEncoder]
 
     def __init__(self, type: str, name: Optional[str]):
         self.type = EncoderType(type)
diff --git a/semantic_router/encoders/aurelio.py b/semantic_router/encoders/aurelio.py
index bc150e5061bafd78829e26efe0680a6a5b966b74..d226e3d322f789f872d970608120b940f0139367 100644
--- a/semantic_router/encoders/aurelio.py
+++ b/semantic_router/encoders/aurelio.py
@@ -4,11 +4,11 @@ from pydantic.v1 import Field
 
 from aurelio_sdk import AurelioClient, AsyncAurelioClient, EmbeddingResponse
 
-from semantic_router.encoders.base import BaseEncoder
+from semantic_router.encoders.base import SparseEncoder
 from semantic_router.schema import SparseEmbedding
 
 
-class AurelioSparseEncoder(BaseEncoder):
+class AurelioSparseEncoder(SparseEncoder):
     model: Optional[Any] = None
     idx_mapping: Optional[Dict[int, int]] = None
     client: AurelioClient = Field(default_factory=AurelioClient, exclude=True)
diff --git a/semantic_router/encoders/base.py b/semantic_router/encoders/base.py
index fcc5734db4cecb730a9202ed1fba944df645b4be..f2cee15de03aa210810cc0844bee5b52a6f4c727 100644
--- a/semantic_router/encoders/base.py
+++ b/semantic_router/encoders/base.py
@@ -2,8 +2,10 @@ from typing import Any, Coroutine, List, Optional
 
 from pydantic.v1 import BaseModel, Field, validator
 
+from semantic_router.schema import SparseEmbedding
 
-class BaseEncoder(BaseModel):
+
+class DenseEncoder(BaseModel):
     name: str
     score_threshold: Optional[float] = None
     type: str = Field(default="base")
@@ -20,3 +22,17 @@ class BaseEncoder(BaseModel):
 
     def acall(self, docs: List[Any]) -> Coroutine[Any, Any, List[List[float]]]:
         raise NotImplementedError("Subclasses must implement this method")
+
+
+class SparseEncoder(BaseModel):
+    name: str
+    type: str = Field(default="base")
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    def __call__(self, docs: List[str]) -> List[SparseEmbedding]:
+        raise NotImplementedError("Subclasses must implement this method")
+
+    def acall(self, docs: List[str]) -> Coroutine[Any, Any, List[SparseEmbedding]]:
+        raise NotImplementedError("Subclasses must implement this method")
diff --git a/semantic_router/encoders/bedrock.py b/semantic_router/encoders/bedrock.py
index fed53900f4e0b10810d667e75d72ef51f066e565..5ec3381e31a3be32ed23e2dbb283db899c78c3e6 100644
--- a/semantic_router/encoders/bedrock.py
+++ b/semantic_router/encoders/bedrock.py
@@ -1,7 +1,7 @@
 """
 This module provides the BedrockEncoder class for generating embeddings using Amazon's Bedrock Platform.
 
-The BedrockEncoder class is a subclass of BaseEncoder and utilizes the TextEmbeddingModel from the
+The BedrockEncoder class is a subclass of DenseEncoder and utilizes the TextEmbeddingModel from the
 Amazon's Bedrock Platform to generate embeddings for given documents. It requires an AWS Access Key ID
 and AWS Secret Access Key and supports customization of the pre-trained model, score threshold, and region.
 
@@ -21,12 +21,12 @@ from typing import List, Optional, Any
 import os
 from time import sleep
 import tiktoken
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.utils.defaults import EncoderDefault
 from semantic_router.utils.logger import logger
 
 
-class BedrockEncoder(BaseEncoder):
+class BedrockEncoder(DenseEncoder):
     client: Any = None
     type: str = "bedrock"
     input_type: Optional[str] = "search_query"
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index 1965fb6ef1f6fb5032134c562d4c38ac6b22f8fa..4eac26e79c591cf5a0aafba42b426a75c3bfd476 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -1,10 +1,10 @@
 from typing import Any, Dict, List, Optional
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import SparseEncoder
 from semantic_router.utils.logger import logger
 
 
-class BM25Encoder(BaseEncoder):
+class BM25Encoder(SparseEncoder):
     model: Optional[Any] = None
     idx_mapping: Optional[Dict[int, int]] = None
     type: str = "sparse"
diff --git a/semantic_router/encoders/clip.py b/semantic_router/encoders/clip.py
index 6495c870fa25eb5e55f4ddf93081009dc752597c..65fbdb8f0312d5339d68ae5900542aea2c896693 100644
--- a/semantic_router/encoders/clip.py
+++ b/semantic_router/encoders/clip.py
@@ -3,10 +3,10 @@ from typing import Any, List, Optional
 import numpy as np
 from pydantic.v1 import PrivateAttr
 from typing import Dict
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 
 
-class CLIPEncoder(BaseEncoder):
+class CLIPEncoder(DenseEncoder):
     name: str = "openai/clip-vit-base-patch16"
     type: str = "huggingface"
     score_threshold: float = 0.2
diff --git a/semantic_router/encoders/cohere.py b/semantic_router/encoders/cohere.py
index cdc114bbed659abceb58f258dc6ed0f35c767f2d..04b878141a4b33d742d5bb28a5bfc8bcd1347d19 100644
--- a/semantic_router/encoders/cohere.py
+++ b/semantic_router/encoders/cohere.py
@@ -3,11 +3,11 @@ from typing import Any, List, Optional
 
 from pydantic.v1 import PrivateAttr
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.utils.defaults import EncoderDefault
 
 
-class CohereEncoder(BaseEncoder):
+class CohereEncoder(DenseEncoder):
     _client: Any = PrivateAttr()
     _embed_type: Any = PrivateAttr()
     type: str = "cohere"
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index 27590bc398e34fc6f0ed847775db7680f6106d05..5cda5e643746c660cd6e820cb767a6a333981b2d 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -3,10 +3,10 @@ from typing import Any, List, Optional
 import numpy as np
 from pydantic.v1 import PrivateAttr
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 
 
-class FastEmbedEncoder(BaseEncoder):
+class FastEmbedEncoder(DenseEncoder):
     type: str = "fastembed"
     name: str = "BAAI/bge-small-en-v1.5"
     max_length: int = 512
diff --git a/semantic_router/encoders/google.py b/semantic_router/encoders/google.py
index 088d4bba943360202c455a1bb4fbf1b6dc51b927..5d50a0e14ea573269202e04086ed21f44997e310 100644
--- a/semantic_router/encoders/google.py
+++ b/semantic_router/encoders/google.py
@@ -1,7 +1,7 @@
 """
 This module provides the GoogleEncoder class for generating embeddings using Google's AI Platform.
 
-The GoogleEncoder class is a subclass of BaseEncoder and utilizes the TextEmbeddingModel from the
+The GoogleEncoder class is a subclass of DenseEncoder and utilizes the TextEmbeddingModel from the
 Google AI Platform to generate embeddings for given documents. It requires a Google Cloud project ID
 and supports customization of the pre-trained model, score threshold, location, and API endpoint.
 
@@ -19,11 +19,11 @@ Classes:
 import os
 from typing import Any, List, Optional
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.utils.defaults import EncoderDefault
 
 
-class GoogleEncoder(BaseEncoder):
+class GoogleEncoder(DenseEncoder):
     """GoogleEncoder class for generating embeddings using Google's AI Platform.
 
     Attributes:
diff --git a/semantic_router/encoders/huggingface.py b/semantic_router/encoders/huggingface.py
index 7ca7580d972238824c5b42c8a2e8e89547cf9522..7c7e56f77f11cf5143437c822ffda338c8f92cf0 100644
--- a/semantic_router/encoders/huggingface.py
+++ b/semantic_router/encoders/huggingface.py
@@ -1,7 +1,7 @@
 """
 This module provides the HFEndpointEncoder class to embeddings models using Huggingface's endpoint.
 
-The HFEndpointEncoder class is a subclass of BaseEncoder and utilizes a specified Huggingface 
+The HFEndpointEncoder class is a subclass of DenseEncoder and utilizes a specified Huggingface 
 endpoint to generate embeddings for given documents. It requires the URL of the Huggingface 
 API endpoint and an API key for authentication. The class supports customization of the score 
 threshold for filtering or processing the embeddings.
@@ -27,11 +27,11 @@ from typing import Any, List, Optional, Dict
 
 from pydantic.v1 import PrivateAttr
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.utils.logger import logger
 
 
-class HuggingFaceEncoder(BaseEncoder):
+class HuggingFaceEncoder(DenseEncoder):
     name: str = "sentence-transformers/all-MiniLM-L6-v2"
     type: str = "huggingface"
     score_threshold: float = 0.5
@@ -140,7 +140,7 @@ class HuggingFaceEncoder(BaseEncoder):
         return self._torch.max(token_embeddings, 1)[0]
 
 
-class HFEndpointEncoder(BaseEncoder):
+class HFEndpointEncoder(DenseEncoder):
     """
     A class to encode documents using a Hugging Face transformer model endpoint.
 
diff --git a/semantic_router/encoders/mistral.py b/semantic_router/encoders/mistral.py
index 974f11284a162979420ac897474413c266fff1a5..46bc89aec0ed9ed41fffc5734fa5a7658f6f5cd7 100644
--- a/semantic_router/encoders/mistral.py
+++ b/semantic_router/encoders/mistral.py
@@ -6,11 +6,11 @@ from typing import Any, List, Optional
 
 from pydantic.v1 import PrivateAttr
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.utils.defaults import EncoderDefault
 
 
-class MistralEncoder(BaseEncoder):
+class MistralEncoder(DenseEncoder):
     """Class to encode text using MistralAI"""
 
     _client: Any = PrivateAttr()
diff --git a/semantic_router/encoders/openai.py b/semantic_router/encoders/openai.py
index 4bc86ac29b16892536bfc7f5afb8ba3f2540f7f1..fb8a83f097e252a7fcd03a356f648077eb267cda 100644
--- a/semantic_router/encoders/openai.py
+++ b/semantic_router/encoders/openai.py
@@ -10,7 +10,7 @@ from openai._types import NotGiven
 from openai.types import CreateEmbeddingResponse
 import tiktoken
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.schema import EncoderInfo
 from semantic_router.utils.defaults import EncoderDefault
 from semantic_router.utils.logger import logger
@@ -35,7 +35,7 @@ model_configs = {
 }
 
 
-class OpenAIEncoder(BaseEncoder):
+class OpenAIEncoder(DenseEncoder):
     client: Optional[openai.Client]
     async_client: Optional[openai.AsyncClient]
     dimensions: Union[int, NotGiven] = NotGiven()
diff --git a/semantic_router/encoders/tfidf.py b/semantic_router/encoders/tfidf.py
index 17cc569ae8c9045ed40f3d47b0cb3912df46e4cd..873d900af239d00e65d462eebf597d6e4ef24bab 100644
--- a/semantic_router/encoders/tfidf.py
+++ b/semantic_router/encoders/tfidf.py
@@ -6,11 +6,11 @@ import numpy as np
 from numpy import ndarray
 from numpy.linalg import norm
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import SparseEncoder
 from semantic_router.route import Route
 
 
-class TfidfEncoder(BaseEncoder):
+class TfidfEncoder(SparseEncoder):
     idf: ndarray = np.array([])
     word_index: Dict = {}
 
diff --git a/semantic_router/encoders/vit.py b/semantic_router/encoders/vit.py
index 44ae58018635e67a10d37150a1de1c783a4cee97..73cb058281d3c47d452bfa3d5a8e4c92246ae173 100644
--- a/semantic_router/encoders/vit.py
+++ b/semantic_router/encoders/vit.py
@@ -2,10 +2,10 @@ from typing import Any, List, Optional, Dict
 
 from pydantic.v1 import PrivateAttr
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 
 
-class VitEncoder(BaseEncoder):
+class VitEncoder(DenseEncoder):
     name: str = "google/vit-base-patch16-224"
     type: str = "huggingface"
     score_threshold: float = 0.5
diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
index d6f656609cdcc83ae8b23c2df5e44cf753facfdf..fd8594a986ea5911cd78c4761b304f72fb6b4b2e 100644
--- a/semantic_router/encoders/zure.py
+++ b/semantic_router/encoders/zure.py
@@ -8,12 +8,12 @@ from openai._types import NotGiven
 from openai import OpenAIError
 from openai.types import CreateEmbeddingResponse
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 from semantic_router.utils.defaults import EncoderDefault
 from semantic_router.utils.logger import logger
 
 
-class AzureOpenAIEncoder(BaseEncoder):
+class AzureOpenAIEncoder(DenseEncoder):
     client: Optional[openai.AzureOpenAI] = None
     async_client: Optional[openai.AsyncAzureOpenAI] = None
     dimensions: Union[int, NotGiven] = NotGiven()
diff --git a/semantic_router/routers/base.py b/semantic_router/routers/base.py
index f7ab522608dcb3ec86368459b498ac8856544535..1af3e38b9d418b5eaa7647493e1367aaf4e7213b 100644
--- a/semantic_router/routers/base.py
+++ b/semantic_router/routers/base.py
@@ -10,7 +10,7 @@ import numpy as np
 import yaml  # type: ignore
 from tqdm.auto import tqdm
 
-from semantic_router.encoders import AutoEncoder, BaseEncoder, OpenAIEncoder
+from semantic_router.encoders import AutoEncoder, DenseEncoder, OpenAIEncoder
 from semantic_router.index.base import BaseIndex
 from semantic_router.index.local import LocalIndex
 from semantic_router.index.pinecone import PineconeIndex
@@ -307,7 +307,7 @@ class RouterConfig:
 
 
 class BaseRouter(BaseModel):
-    encoder: BaseEncoder
+    encoder: DenseEncoder
     index: BaseIndex = Field(default_factory=BaseIndex)
     score_threshold: Optional[float] = Field(default=None)
     routes: List[Route] = []
@@ -322,7 +322,7 @@ class BaseRouter(BaseModel):
 
     def __init__(
         self,
-        encoder: Optional[BaseEncoder] = None,
+        encoder: Optional[DenseEncoder] = None,
         llm: Optional[BaseLLM] = None,
         routes: List[Route] = [],
         index: Optional[BaseIndex] = None,  # type: ignore
diff --git a/semantic_router/routers/hybrid.py b/semantic_router/routers/hybrid.py
index a0a966718d04e858a9410c74e7ab60a68ca4c959..f8ec89cd8380c6d82d9c3e6a8233ccba0ea48277 100644
--- a/semantic_router/routers/hybrid.py
+++ b/semantic_router/routers/hybrid.py
@@ -5,7 +5,8 @@ from pydantic.v1 import Field
 import numpy as np
 
 from semantic_router.encoders import (
-    BaseEncoder,
+    DenseEncoder,
+    SparseEncoder,
     BM25Encoder,
     TfidfEncoder,
 )
@@ -21,13 +21,13 @@ class HybridRouter(BaseRouter):
     """A hybrid layer that uses both dense and sparse embeddings to classify routes."""
 
     # there are a few additional attributes for hybrid
-    sparse_encoder: Optional[BaseEncoder] = Field(default=None)
+    sparse_encoder: Optional[SparseEncoder] = Field(default=None)
     alpha: float = 0.3
 
     def __init__(
         self,
-        encoder: BaseEncoder,
-        sparse_encoder: Optional[BaseEncoder] = None,
+        encoder: DenseEncoder,
+        sparse_encoder: Optional[SparseEncoder] = None,
         llm: Optional[BaseLLM] = None,
         routes: List[Route] = [],
         index: Optional[HybridLocalIndex] = None,
@@ -61,7 +61,7 @@ class HybridRouter(BaseRouter):
         if self.auto_sync:
             self._init_index_state()
     
-    def _set_sparse_encoder(self, sparse_encoder: Optional[BaseEncoder]):
+    def _set_sparse_encoder(self, sparse_encoder: Optional[SparseEncoder]):
         if sparse_encoder is None:
             logger.warning("No sparse_encoder provided. Using default BM25Encoder.")
             self.sparse_encoder = BM25Encoder()
diff --git a/semantic_router/routers/semantic.py b/semantic_router/routers/semantic.py
index f0df271736cfc8f6c3f678ba675734a9ae927556..e8a7db14ca13d6adb3cba3892ba90cc7267bcf3e 100644
--- a/semantic_router/routers/semantic.py
+++ b/semantic_router/routers/semantic.py
@@ -6,7 +6,7 @@ from pydantic.v1 import validator, Field
 import numpy as np
 from tqdm.auto import tqdm
 
-from semantic_router.encoders import AutoEncoder, BaseEncoder, OpenAIEncoder
+from semantic_router.encoders import AutoEncoder, DenseEncoder, OpenAIEncoder
 from semantic_router.index.base import BaseIndex
 from semantic_router.index.local import LocalIndex
 from semantic_router.index.pinecone import PineconeIndex
@@ -55,7 +55,7 @@ def is_valid(layer_config: str) -> bool:
 class SemanticRouter(BaseRouter):
     def __init__(
         self,
-        encoder: Optional[BaseEncoder] = None,
+        encoder: Optional[DenseEncoder] = None,
         llm: Optional[BaseLLM] = None,
         routes: Optional[List[Route]] = None,
         index: Optional[BaseIndex] = None,  # type: ignore
diff --git a/tests/integration/encoders/test_openai_integration.py b/tests/integration/encoders/test_openai_integration.py
index 47e617a5b931f02e25346c8c9e21b3607bc27f9e..c7bd8d9add219c14a9937ba2325c34ebf5b679e7 100644
--- a/tests/integration/encoders/test_openai_integration.py
+++ b/tests/integration/encoders/test_openai_integration.py
@@ -1,7 +1,7 @@
 import os
 import pytest
 from openai import OpenAIError
-from semantic_router.encoders.base import BaseEncoder
+from semantic_router.encoders.base import DenseEncoder
 from semantic_router.encoders.openai import OpenAIEncoder
 
 with open("tests/integration/57640.4032.txt", "r") as fp:
@@ -11,7 +11,7 @@ with open("tests/integration/57640.4032.txt", "r") as fp:
 @pytest.fixture
 def openai_encoder():
     if os.environ.get("OPENAI_API_KEY") is None:
-        return BaseEncoder()
+        return DenseEncoder()
     else:
         return OpenAIEncoder()
 
diff --git a/tests/unit/encoders/test_base.py b/tests/unit/encoders/test_base.py
index 4d4b87aecee1b69494d09c8c1a25cde085081d5e..53a6233183254406a3d8be7889d5dca9c93f46cf 100644
--- a/tests/unit/encoders/test_base.py
+++ b/tests/unit/encoders/test_base.py
@@ -1,12 +1,12 @@
 import pytest
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.encoders import DenseEncoder
 
 
-class TestBaseEncoder:
+class TestDenseEncoder:
     @pytest.fixture
     def base_encoder(self):
-        return BaseEncoder(name="TestEncoder", score_threshold=0.5)
+        return DenseEncoder(name="TestEncoder", score_threshold=0.5)
 
     def test_base_encoder_initialization(self, base_encoder):
         assert base_encoder.name == "TestEncoder", "Initialization of name failed"
diff --git a/tests/unit/test_hybrid_layer.py b/tests/unit/test_hybrid_layer.py
index 3cba34caa8ca28f4c421117857b163047b0be4ff..564bd3a191c7c8595147f089f1741b238e9011c4 100644
--- a/tests/unit/test_hybrid_layer.py
+++ b/tests/unit/test_hybrid_layer.py
@@ -2,13 +2,13 @@ import pytest
 
 from semantic_router.encoders import (
     AzureOpenAIEncoder,
-    BaseEncoder,
+    DenseEncoder,
     BM25Encoder,
     CohereEncoder,
     OpenAIEncoder,
     TfidfEncoder,
 )
-from semantic_router.OLD_hybrid_layer import HybridRouter
+from semantic_router.routers import HybridRouter
 from semantic_router.route import Route
 
 
@@ -26,8 +26,8 @@ def mock_encoder_call(utterances):
 
 @pytest.fixture
 def base_encoder(mocker):
-    mock_base_encoder = BaseEncoder(name="test-encoder", score_threshold=0.5)
-    mocker.patch.object(BaseEncoder, "__call__", return_value=[[0.1, 0.2, 0.3]])
+    mock_base_encoder = DenseEncoder(name="test-encoder", score_threshold=0.5)
+    mocker.patch.object(DenseEncoder, "__call__", return_value=[[0.1, 0.2, 0.3]])
     return mock_base_encoder
 
 
diff --git a/tests/unit/test_router.py b/tests/unit/test_router.py
index 2eef4666559db631cce399af990a40b067d0ea3d..8d47ee4b1e76d3f8526e2bbd14a588d789e8ad0d 100644
--- a/tests/unit/test_router.py
+++ b/tests/unit/test_router.py
@@ -6,7 +6,7 @@ from datetime import datetime
 import pytest
 import time
 from typing import Optional
-from semantic_router.encoders import BaseEncoder, CohereEncoder, OpenAIEncoder
+from semantic_router.encoders import DenseEncoder, CohereEncoder, OpenAIEncoder
 from semantic_router.index.local import LocalIndex
 from semantic_router.index.pinecone import PineconeIndex
 from semantic_router.index.qdrant import QdrantIndex
@@ -102,7 +102,7 @@ routes:
 
 @pytest.fixture
 def base_encoder():
-    return BaseEncoder(name="test-encoder", score_threshold=0.5)
+    return DenseEncoder(name="test-encoder", score_threshold=0.5)
 
 
 @pytest.fixture
diff --git a/tests/unit/test_sync.py b/tests/unit/test_sync.py
index 2327fc0642b7acfe41103c366376840fa662dd06..8405add9dcfcc3db94164a3c41baec558cd07eff 100644
--- a/tests/unit/test_sync.py
+++ b/tests/unit/test_sync.py
@@ -4,7 +4,7 @@ from datetime import datetime
 import pytest
 import time
 from typing import Optional
-from semantic_router.encoders import BaseEncoder, CohereEncoder, OpenAIEncoder
+from semantic_router.encoders import DenseEncoder, CohereEncoder, OpenAIEncoder
 from semantic_router.index.pinecone import PineconeIndex
 from semantic_router.schema import Utterance
 from semantic_router.routers.base import SemanticRouter
@@ -100,7 +100,7 @@ routes:
 
 @pytest.fixture
 def base_encoder():
-    return BaseEncoder(name="test-encoder", score_threshold=0.5)
+    return DenseEncoder(name="test-encoder", score_threshold=0.5)
 
 
 @pytest.fixture