diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index d13975c080b237075c6b13c952323f4c98650e77..b0de34d01da3fe17fcecab900accbbf5daba3c45 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -19,6 +19,7 @@ class Decision(BaseModel):
 class EncoderType(Enum):
     OPENAI = "openai"
     COHERE = "cohere"
+    HUGGINGFACE = "huggingface"
 
 
 @dataclass
diff --git a/tests/encoders/test_openai.py b/tests/encoders/test_openai.py
index dbc3ebb60602392ee6654c41b0e824944859d8a5..9dd113adb1b05ecca696a0b3680637800ee40b56 100644
--- a/tests/encoders/test_openai.py
+++ b/tests/encoders/test_openai.py
@@ -1,8 +1,9 @@
 import os
+
 import pytest
 import openai
-from semantic_router.encoders import OpenAIEncoder
 from openai.error import RateLimitError
+from semantic_router.encoders import OpenAIEncoder
 
 
 @pytest.fixture
@@ -29,8 +30,8 @@ class TestOpenAIEncoder:
         assert isinstance(result, list), "Result should be a list"
         assert len(result) == 1 and len(result[0]) == 3, "Result list size is incorrect"
 
-    @pytest.mark.skip(reason="Currently quite a slow test")
-    def test_call_method_rate_limit_error(self, openai_encoder, mocker):
+    def test_call_method_rate_limit_error__raises_value_error_after_max_retries(self, openai_encoder, mocker):
+        mocker.patch("semantic_router.encoders.openai.sleep")
         mocker.patch(
             "openai.Embedding.create", side_effect=RateLimitError(message="rate limit exceeded", http_status=429)
         )
@@ -43,3 +44,64 @@ class TestOpenAIEncoder:
 
         with pytest.raises(ValueError):
             openai_encoder(["test"])
+
+    def test_call_method_rate_limit_error__exponential_backoff_single_retry(self, openai_encoder, mocker):
+        mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
+        mocker.patch(
+            "openai.Embedding.create",
+            side_effect=[
+                RateLimitError("rate limit exceeded"),
+                {"data": [{"embedding": [1, 2, 3]}]},
+            ],
+        )
+
+        openai_encoder(["sample text"])
+
+        mock_sleep.assert_called_once_with(1)  # 2**0
+
+    def test_call_method_rate_limit_error__exponential_backoff_multiple_retries(self, openai_encoder, mocker):
+        mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
+        mocker.patch(
+            "openai.Embedding.create",
+            side_effect=[
+                RateLimitError("rate limit exceeded"),
+                RateLimitError("rate limit exceeded"),
+                {"data": [{"embedding": [1, 2, 3]}]},
+            ],
+        )
+
+        openai_encoder(["sample text"])
+
+        assert mock_sleep.call_count == 2
+        mock_sleep.assert_any_call(1)  # 2**0
+        mock_sleep.assert_any_call(2)  # 2**1
+
+    def test_call_method_rate_limit_error__exponential_backoff_max_retries_exceeded(self, openai_encoder, mocker):
+        mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
+        mocker.patch("openai.Embedding.create", side_effect=RateLimitError("rate limit exceeded"))
+
+        with pytest.raises(ValueError):
+            openai_encoder(["sample text"])
+
+        assert mock_sleep.call_count == 5  # encoder sleeps once per retry; 5 retries before giving up
+        mock_sleep.assert_any_call(1)  # 2**0
+        mock_sleep.assert_any_call(2)  # 2**1
+        mock_sleep.assert_any_call(4)  # 2**2
+        mock_sleep.assert_any_call(8)  # 2**3
+        mock_sleep.assert_any_call(16)  # 2**4
+
+    def test_call_method_rate_limit_error__exponential_backoff_successful(self, openai_encoder, mocker):
+        mock_sleep = mocker.patch("semantic_router.encoders.openai.sleep")
+        mocker.patch(
+            "openai.Embedding.create",
+            side_effect=[
+                RateLimitError("rate limit exceeded"),
+                RateLimitError("rate limit exceeded"),
+                {"data": [{"embedding": [1, 2, 3]}]},
+            ],
+        )
+
+        embeddings = openai_encoder(["sample text"])
+
+        assert mock_sleep.call_count == 2
+        assert embeddings == [[1, 2, 3]]
diff --git a/tests/test_layer.py b/tests/test_layer.py
index ce06e4a0f8ebf3de5435cede5e3f531cf377263d..63209de38acd5e6bd035ebd55c248b6be7737bff 100644
--- a/tests/test_layer.py
+++ b/tests/test_layer.py
@@ -17,6 +17,11 @@ def mock_encoder_call(utterances):
     return [mock_responses.get(u, [0, 0, 0]) for u in utterances]
 
 
+@pytest.fixture
+def base_encoder():
+    return BaseEncoder(name="test-encoder")
+
+
 @pytest.fixture
 def cohere_encoder(mocker):
     mocker.patch.object(CohereEncoder, "__call__", side_effect=mock_encoder_call)
@@ -102,5 +107,9 @@ class TestDecisionLayer:
         assert not decision_layer._pass_threshold([], 0.5)
         assert decision_layer._pass_threshold([0.6, 0.7], 0.5)
 
+    def test_failover_similarity_threshold(self, base_encoder):
+        decision_layer = DecisionLayer(encoder=base_encoder)
+        assert decision_layer.similarity_threshold == 0.82
+
 
 # Add more tests for edge cases and error handling as needed.
diff --git a/tests/test_schema.py b/tests/test_schema.py
new file mode 100644
index 0000000000000000000000000000000000000000..0088c358bf4043933560d14759373c312e143145
--- /dev/null
+++ b/tests/test_schema.py
@@ -0,0 +1,48 @@
+import pytest
+from semantic_router.schema import Decision, Encoder, EncoderType, OpenAIEncoder, CohereEncoder, SemanticSpace
+
+
+class TestEncoderDataclass:
+    def test_encoder_initialization_openai(self, mocker):
+        mocker.patch.dict("os.environ", {"OPENAI_API_KEY": "test"})
+        encoder = Encoder(type="openai", name="test-engine")
+        assert encoder.type == EncoderType.OPENAI
+        assert isinstance(encoder.model, OpenAIEncoder)
+
+    def test_encoder_initialization_cohere(self, mocker):
+        mocker.patch.dict("os.environ", {"COHERE_API_KEY": "test"})
+        encoder = Encoder(type="cohere", name="test-engine")
+        assert encoder.type == EncoderType.COHERE
+        assert isinstance(encoder.model, CohereEncoder)
+
+    def test_encoder_initialization_unsupported_type(self):
+        with pytest.raises(ValueError):
+            Encoder(type="unsupported", name="test-engine")
+
+    def test_encoder_initialization_huggingface(self):
+        with pytest.raises(NotImplementedError):
+            Encoder(type="huggingface", name="test-engine")
+
+    def test_encoder_call_method(self, mocker):
+        mocker.patch.dict("os.environ", {"OPENAI_API_KEY": "test"})
+        mocker.patch("semantic_router.encoders.openai.OpenAIEncoder.__call__", return_value=[0.1, 0.2, 0.3])
+        encoder = Encoder(type="openai", name="test-engine")
+        result = encoder(["test"])
+        assert result == [0.1, 0.2, 0.3]
+
+
+class TestSemanticSpaceDataclass:
+    def test_semanticspace_initialization(self):
+        semantic_space = SemanticSpace()
+        assert semantic_space.id == ""
+        assert semantic_space.decisions == []
+
+    def test_semanticspace_add_decision(self):
+        decision = Decision(name="test", utterances=["hello", "hi"], description="greeting")
+        semantic_space = SemanticSpace()
+        semantic_space.add(decision)
+
+        assert len(semantic_space.decisions) == 1
+        assert semantic_space.decisions[0].name == "test"
+        assert semantic_space.decisions[0].utterances == ["hello", "hi"]
+        assert semantic_space.decisions[0].description == "greeting"