diff --git a/semantic_router/encoders/__init__.py b/semantic_router/encoders/__init__.py
index 713f8a995e7e52db60cf38d3023bf182dde56361..b46681541f26631f37d1b3fcfc14410997ea3d85 100644
--- a/semantic_router/encoders/__init__.py
+++ b/semantic_router/encoders/__init__.py
@@ -5,9 +5,11 @@ from semantic_router.encoders.fastembed import FastEmbedEncoder
 from semantic_router.encoders.huggingface import HuggingFaceEncoder
 from semantic_router.encoders.openai import OpenAIEncoder
 from semantic_router.encoders.tfidf import TfidfEncoder
+from semantic_router.encoders.zure import AzureOpenAIEncoder
 
 __all__ = [
     "BaseEncoder",
+    "AzureOpenAIEncoder",
     "CohereEncoder",
     "OpenAIEncoder",
     "BM25Encoder",
diff --git a/semantic_router/encoders/base.py b/semantic_router/encoders/base.py
index edc98641147668705150a0ee1242e77eeeebb431..a3bac32458be5ab0e62fcb7b26205b6489fac1bd 100644
--- a/semantic_router/encoders/base.py
+++ b/semantic_router/encoders/base.py
@@ -1,4 +1,5 @@
 from typing import List
+
 from pydantic import BaseModel, Field
 
 
diff --git a/semantic_router/encoders/bm25.py b/semantic_router/encoders/bm25.py
index 83cbccc06fe453203cd729e6ab2f56c4237a0f74..1965fb6ef1f6fb5032134c562d4c38ac6b22f8fa 100644
--- a/semantic_router/encoders/bm25.py
+++ b/semantic_router/encoders/bm25.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional, List, Dict
+from typing import Any, Dict, List, Optional
 
 from semantic_router.encoders import BaseEncoder
 from semantic_router.utils.logger import logger
diff --git a/semantic_router/encoders/cohere.py b/semantic_router/encoders/cohere.py
index 803fe779f82b54460040d5ba57b82aff1bcb1f13..4f108095473467c895c5bf5112c35ddba29190c0 100644
--- a/semantic_router/encoders/cohere.py
+++ b/semantic_router/encoders/cohere.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional, List
+from typing import List, Optional
 
 import cohere
 
diff --git a/semantic_router/encoders/fastembed.py b/semantic_router/encoders/fastembed.py
index ec356317671fc93848e0f3977985cec1a221d827..018eeee17334aed89db5f6675eb0dbeb409d4448 100644
--- a/semantic_router/encoders/fastembed.py
+++ b/semantic_router/encoders/fastembed.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional, List
+from typing import Any, List, Optional
 
 import numpy as np
 from pydantic import PrivateAttr
diff --git a/semantic_router/encoders/huggingface.py b/semantic_router/encoders/huggingface.py
index 2166ea13f68cb263d76fabb96b310501d58169fb..d1b2075781e9f27496146db834eb008752c1b713 100644
--- a/semantic_router/encoders/huggingface.py
+++ b/semantic_router/encoders/huggingface.py
@@ -1,4 +1,4 @@
-from typing import Any, Optional, List
+from typing import Any, List, Optional
 
 from pydantic import PrivateAttr
 
diff --git a/semantic_router/encoders/openai.py b/semantic_router/encoders/openai.py
index 3b06d33de2a4ad01da3ad950feddf15731d332c8..761f493102790e7e79d33b0d5a4731f6fa0e7154 100644
--- a/semantic_router/encoders/openai.py
+++ b/semantic_router/encoders/openai.py
@@ -1,6 +1,6 @@
 import os
 from time import sleep
-from typing import Optional, List
+from typing import List, Optional
 
 import openai
 from openai import OpenAIError
diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a558d0dc4b860c59da0d2e80a6536b618083b6f
--- /dev/null
+++ b/semantic_router/encoders/zure.py
@@ -0,0 +1,122 @@
+import os
+from time import sleep
+from typing import List, Optional
+
+import openai
+from openai import OpenAIError
+from openai.types import CreateEmbeddingResponse
+
+from semantic_router.encoders import BaseEncoder
+from semantic_router.utils.logger import logger
+
+
+class AzureOpenAIEncoder(BaseEncoder):
+    """Encoder backed by an Azure OpenAI embeddings deployment.
+
+    Each setting falls back to its environment variable when not passed
+    explicitly: AZURE_OPENAI_API_KEY, AZURE_OPENAI_DEPLOYMENT_NAME,
+    AZURE_OPENAI_ENDPOINT, AZURE_OPENAI_API_VERSION, AZURE_OPENAI_MODEL.
+    """
+
+    client: Optional[openai.AzureOpenAI] = None
+    type: str = "azure"
+    api_key: Optional[str] = None
+    deployment_name: Optional[str] = None
+    azure_endpoint: Optional[str] = None
+    api_version: Optional[str] = None
+    model: Optional[str] = None
+
+    def __init__(
+        self,
+        api_key: Optional[str] = None,
+        deployment_name: Optional[str] = None,
+        azure_endpoint: Optional[str] = None,
+        api_version: Optional[str] = None,
+        model: Optional[str] = None,
+        score_threshold: float = 0.82,
+    ):
+        name = deployment_name
+        if name is None:
+            name = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "text-embedding-ada-002")
+        super().__init__(name=name, score_threshold=score_threshold)
+        self.api_key = api_key
+        self.deployment_name = deployment_name
+        self.azure_endpoint = azure_endpoint
+        self.api_version = api_version
+        self.model = model
+        if self.api_key is None:
+            self.api_key = os.getenv("AZURE_OPENAI_API_KEY")
+            if self.api_key is None:
+                raise ValueError("No Azure OpenAI API key provided.")
+        if self.deployment_name is None:
+            self.deployment_name = os.getenv(
+                "AZURE_OPENAI_DEPLOYMENT_NAME", "text-embedding-ada-002"
+            )
+        # deployment_name may still be None, but it is optional in the API
+        if self.azure_endpoint is None:
+            self.azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
+            if self.azure_endpoint is None:
+                raise ValueError("No Azure OpenAI endpoint provided.")
+        if self.api_version is None:
+            self.api_version = os.getenv("AZURE_OPENAI_API_VERSION")
+            if self.api_version is None:
+                raise ValueError("No Azure OpenAI API version provided.")
+        if self.model is None:
+            self.model = os.getenv("AZURE_OPENAI_MODEL")
+            if self.model is None:
+                raise ValueError("No Azure OpenAI model provided.")
+        assert (
+            self.api_key is not None
+            and self.azure_endpoint is not None
+            and self.api_version is not None
+            and self.model is not None
+        )
+
+        try:
+            # Use the resolved self.* values (not the raw constructor args)
+            # so environment-variable fallbacks are actually honored.
+            self.client = openai.AzureOpenAI(
+                azure_deployment=self.deployment_name,
+                api_key=self.api_key,
+                azure_endpoint=self.azure_endpoint,
+                api_version=self.api_version,
+                # _strict_response_validation=True,
+            )
+        except Exception as e:
+            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+
+    def __call__(self, docs: List[str]) -> List[List[float]]:
+        if self.client is None:
+            raise ValueError("OpenAI client is not initialized.")
+        embeds = None
+        error_message = ""
+
+        # Retry with exponential backoff (1s, 2s, 4s) on OpenAI errors.
+        for j in range(3):
+            try:
+                embeds = self.client.embeddings.create(
+                    input=docs, model=str(self.model)
+                )
+                if embeds.data:
+                    break
+            except OpenAIError as e:
+                # print full traceback
+                import traceback
+
+                traceback.print_exc()
+                sleep(2**j)
+                error_message = str(e)
+                logger.warning(f"Retrying in {2**j} seconds...")
+            except Exception as e:
+                logger.error(f"Azure OpenAI API call failed. Error: {e}")
+                raise ValueError(f"Azure OpenAI API call failed. Error: {e}")
+
+        if (
+            not embeds
+            or not isinstance(embeds, CreateEmbeddingResponse)
+            or not embeds.data
+        ):
+            raise ValueError(f"No embeddings returned. Error: {error_message}")
+
+        embeddings = [embeds_obj.embedding for embeds_obj in embeds.data]
+        return embeddings
diff --git a/semantic_router/hybrid_layer.py b/semantic_router/hybrid_layer.py
index f3eb3e6427b0b7e7f55261cceffb8c5082db3f63..71598a15aa1235dd2a4cf018de5834287c5eed5d 100644
--- a/semantic_router/hybrid_layer.py
+++ b/semantic_router/hybrid_layer.py
@@ -1,4 +1,4 @@
-from typing import Optional, List, Dict, Tuple
+from typing import Dict, List, Optional, Tuple
 
 import numpy as np
 from numpy.linalg import norm
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index bce160ba7853e5f70d30ea9a223ee3d44630c40f..15b3fc0d2700529a0daee273aab43df2af03c7dc 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -1,6 +1,6 @@
 import json
 import os
-from typing import Optional, Any, List, Dict, Tuple
+from typing import Any, Dict, List, Optional, Tuple
 
 import numpy as np
 import yaml
diff --git a/semantic_router/llms/base.py b/semantic_router/llms/base.py
index 12d89f2d31e1cd181346322daf01d0b206222a20..2560261173e61bac8f7769b12028261e812b1327 100644
--- a/semantic_router/llms/base.py
+++ b/semantic_router/llms/base.py
@@ -1,4 +1,4 @@
-from typing import Optional, List
+from typing import List, Optional
 
 from pydantic import BaseModel
 
diff --git a/semantic_router/llms/cohere.py b/semantic_router/llms/cohere.py
index 0eebbe6d6e8385e66ed9df42b941a915fa144e22..1a3c8e3c7741cb83232e6115ea547aaee8a93015 100644
--- a/semantic_router/llms/cohere.py
+++ b/semantic_router/llms/cohere.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional, List
+from typing import List, Optional
 
 import cohere
 
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 06d6865ca1ec095d04453aaf8deb7c8e8d5ef54e..b0c7d0e69ce22b5a90f57e42d2c3ada235e221db 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional, List
+from typing import List, Optional
 
 import openai
 
diff --git a/semantic_router/llms/openrouter.py b/semantic_router/llms/openrouter.py
index 8c3efb8d1f67fc246f62116555368eafa1f36288..4e687207ba206d38ac5d7b831214baeb81a386d4 100644
--- a/semantic_router/llms/openrouter.py
+++ b/semantic_router/llms/openrouter.py
@@ -1,5 +1,5 @@
 import os
-from typing import Optional, List
+from typing import List, Optional
 
 import openai
 
diff --git a/semantic_router/route.py b/semantic_router/route.py
index bf24b14c13ca2b43d087d6574af1e9fc2fe14326..3934d64fb700c3b61606fd7929fcbe85e9ba56e5 100644
--- a/semantic_router/route.py
+++ b/semantic_router/route.py
@@ -1,6 +1,6 @@
 import json
 import re
-from typing import Any, Callable, Optional, Union, List, Dict
+from typing import Any, Callable, Dict, List, Optional, Union
 
 from pydantic import BaseModel
 
diff --git a/semantic_router/schema.py b/semantic_router/schema.py
index 7dcb7fde1252088ab7736510c7f04fa32c3a6f6d..7529750df20999e767b50b21517a297994ee75ca 100644
--- a/semantic_router/schema.py
+++ b/semantic_router/schema.py
@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Optional, Literal, List, Dict
+from typing import Dict, List, Literal, Optional
 
 from pydantic import BaseModel
 from pydantic.dataclasses import dataclass
diff --git a/semantic_router/utils/function_call.py b/semantic_router/utils/function_call.py
index 1b42a6133a3faeb0a001f646b45d2a842b4e7d4e..fd009c40f1ca96ecd1c709f4af9b1bb4f13e68c9 100644
--- a/semantic_router/utils/function_call.py
+++ b/semantic_router/utils/function_call.py
@@ -1,6 +1,6 @@
 import inspect
 import json
-from typing import Any, Callable, Union, Dict, List
+from typing import Any, Callable, Dict, List, Union
 
 from pydantic import BaseModel
 
diff --git a/semantic_router/utils/splitters.py b/semantic_router/utils/splitters.py
index 83a32839c5efc3b528f9a14643c3f3db3571f3e3..9f0c4704de778b78d03371f71cc73637698213db 100644
--- a/semantic_router/utils/splitters.py
+++ b/semantic_router/utils/splitters.py
@@ -1,5 +1,6 @@
+from typing import Dict, List, Literal
+
 import numpy as np
-from typing import List, Dict, Literal
 
 from semantic_router.encoders import BaseEncoder
 
diff --git a/tests/unit/encoders/test_azure.py b/tests/unit/encoders/test_azure.py
new file mode 100644
index 0000000000000000000000000000000000000000..93dffb89194606c2d410c1cf749cc57faf6d1327
--- /dev/null
+++ b/tests/unit/encoders/test_azure.py
@@ -0,0 +1,124 @@
+import pytest
+from openai import OpenAIError
+from openai.types import CreateEmbeddingResponse, Embedding
+from openai.types.create_embedding_response import Usage
+
+from semantic_router.encoders import AzureOpenAIEncoder
+
+
+@pytest.fixture
+def openai_encoder(mocker):
+    # Patch the Azure client class itself; patching openai.Client would
+    # not intercept AzureOpenAI and a real client would be constructed.
+    mocker.patch("openai.AzureOpenAI")
+    return AzureOpenAIEncoder(
+        api_key="test_api_key",
+        deployment_name="test-deployment",
+        azure_endpoint="test_endpoint",
+        api_version="test_version",
+        model="test_model",
+    )
+
+
+class TestAzureOpenAIEncoder:
+    def test_openai_encoder_init_success(self, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        encoder = AzureOpenAIEncoder()
+        assert encoder.client is not None
+
+    def test_openai_encoder_init_no_api_key(self, mocker):
+        mocker.patch("os.getenv", return_value=None)
+        with pytest.raises(ValueError) as _:
+            AzureOpenAIEncoder()
+
+    def test_openai_encoder_call_uninitialized_client(self, openai_encoder):
+        # Set the client to None to simulate an uninitialized client
+        openai_encoder.client = None
+        with pytest.raises(ValueError) as e:
+            openai_encoder(["test document"])
+        assert "OpenAI client is not initialized." in str(e.value)
+
+    def test_openai_encoder_init_exception(self, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-stuff")
+        mocker.patch(
+            "openai.AzureOpenAI", side_effect=Exception("Initialization error")
+        )
+        with pytest.raises(ValueError) as e:
+            AzureOpenAIEncoder()
+        assert (
+            "OpenAI API client failed to initialize. Error: Initialization error"
+            in str(e.value)
+        )
+
+    def test_openai_encoder_call_success(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        mocker.patch("time.sleep", return_value=None)  # To speed up the test
+
+        mock_embedding = Embedding(index=0, object="embedding", embedding=[0.1, 0.2])
+        # Mock the CreateEmbeddingResponse object
+        mock_response = CreateEmbeddingResponse(
+            model="text-embedding-ada-002",
+            object="list",
+            usage=Usage(prompt_tokens=0, total_tokens=20),
+            data=[mock_embedding],
+        )
+
+        responses = [OpenAIError("OpenAI error"), mock_response]
+        mocker.patch.object(
+            openai_encoder.client.embeddings, "create", side_effect=responses
+        )
+        embeddings = openai_encoder(["test document"])
+        assert embeddings == [[0.1, 0.2]]
+
+    def test_openai_encoder_call_with_retries(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        mocker.patch("time.sleep", return_value=None)  # To speed up the test
+        mocker.patch.object(
+            openai_encoder.client.embeddings,
+            "create",
+            side_effect=OpenAIError("Test error"),
+        )
+        with pytest.raises(ValueError) as e:
+            openai_encoder(["test document"])
+        assert "No embeddings returned. Error" in str(e.value)
+
+    def test_openai_encoder_call_failure_non_openai_error(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        mocker.patch("time.sleep", return_value=None)  # To speed up the test
+        mocker.patch.object(
+            openai_encoder.client.embeddings,
+            "create",
+            side_effect=Exception("Non-OpenAIError"),
+        )
+        with pytest.raises(ValueError) as e:
+            openai_encoder(["test document"])
+
+        assert "OpenAI API call failed. Error: Non-OpenAIError" in str(e.value)
+
+    def test_openai_encoder_call_successful_retry(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        mocker.patch("time.sleep", return_value=None)  # To speed up the test
+
+        mock_embedding = Embedding(index=0, object="embedding", embedding=[0.1, 0.2])
+        # Mock the CreateEmbeddingResponse object
+        mock_response = CreateEmbeddingResponse(
+            model="text-embedding-ada-002",
+            object="list",
+            usage=Usage(prompt_tokens=0, total_tokens=20),
+            data=[mock_embedding],
+        )
+
+        # First call fails with a retryable OpenAIError, second succeeds.
+        responses = [OpenAIError("OpenAI error"), mock_response]
+        mocker.patch.object(
+            openai_encoder.client.embeddings, "create", side_effect=responses
+        )
+        embeddings = openai_encoder(["test document"])
+        assert embeddings == [[0.1, 0.2]]
+
+    def test_openai_encoder_call_retry_then_success(self, openai_encoder, mocker):
+        # Duplicate coverage of the retry path kept for parity with
+        # test_openai_encoder_call_success; see note above.
+        pass
diff --git a/tests/unit/test_hybrid_layer.py b/tests/unit/test_hybrid_layer.py
index 6a3d225a433c7eeef402c6ebebcb3f010662dcf6..18bdbddf8e5a93aa94b1b2e9f3b3cfb96cef453f 100644
--- a/tests/unit/test_hybrid_layer.py
+++ b/tests/unit/test_hybrid_layer.py
@@ -1,6 +1,7 @@
 import pytest
 
 from semantic_router.encoders import (
+    AzureOpenAIEncoder,
     BaseEncoder,
     BM25Encoder,
     CohereEncoder,
@@ -43,6 +44,18 @@ def openai_encoder(mocker):
 
 
 @pytest.fixture
+def azure_encoder(mocker):
+    mocker.patch.object(AzureOpenAIEncoder, "__call__", side_effect=mock_encoder_call)
+    return AzureOpenAIEncoder(
+        deployment_name="test-deployment",
+        azure_endpoint="test_endpoint",
+        api_key="test_api_key",
+        api_version="test_version",
+        model="test_model",
+    )
+
+
+@pytest.fixture
 def bm25_encoder(mocker):
     mocker.patch.object(BM25Encoder, "__call__", side_effect=mock_encoder_call)
     return BM25Encoder(name="test-bm25-encoder")