diff --git a/semantic_router/encoders/__init__.py b/semantic_router/encoders/__init__.py
index ac27ebb4a5aac24d777c4f39929349acab8a438a..c25f11d2db60179a76fedce1daade0c68c60ce99 100644
--- a/semantic_router/encoders/__init__.py
+++ b/semantic_router/encoders/__init__.py
@@ -2,5 +2,12 @@ from semantic_router.encoders.base import BaseEncoder
 from semantic_router.encoders.bm25 import BM25Encoder
 from semantic_router.encoders.cohere import CohereEncoder
 from semantic_router.encoders.openai import OpenAIEncoder
+from semantic_router.encoders.zure import AzureOpenAIEncoder
 
-__all__ = ["BaseEncoder", "CohereEncoder", "OpenAIEncoder", "BM25Encoder"]
+__all__ = [
+    "BaseEncoder",
+    "AzureOpenAIEncoder",
+    "CohereEncoder",
+    "OpenAIEncoder",
+    "BM25Encoder",
+]
diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
new file mode 100644
index 0000000000000000000000000000000000000000..5b8099b991c0ab9d26f695c50094a5e869438cfb
--- /dev/null
+++ b/semantic_router/encoders/zure.py
@@ -0,0 +1,109 @@
+import os
+from time import sleep
+
+import openai
+from openai import OpenAIError
+from openai.types import CreateEmbeddingResponse
+
+from semantic_router.encoders import BaseEncoder
+from semantic_router.utils.logger import logger
+
+
+class AzureOpenAIEncoder(BaseEncoder):
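+    """Encode documents with an Azure OpenAI embeddings deployment.
+
+    Constructor arguments left as None fall back to the corresponding
+    AZURE_OPENAI_* environment variables; a ValueError is raised if a
+    required value is still missing.
+    """
+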
+    client: openai.AzureOpenAI | None = None
+    type: str = "azure"
+    api_key: str | None = None
+    deployment_name: str | None = None
+    azure_endpoint: str | None = None
+    api_version: str | None = None
+    model: str | None = None
+
+    def __init__(
+        self,
+        api_key: str | None = None,
+        deployment_name: str | None = None,
+        azure_endpoint: str | None = None,
+        api_version: str | None = None,
+        model: str | None = None,
+    ):
+        name = deployment_name
+        if name is None:
+            name = os.getenv("AZURE_OPENAI_DEPLOYMENT_NAME", "text-embedding-ada-002")
+        super().__init__(name=name)
+        self.api_key = api_key
+        self.deployment_name = deployment_name
+        self.azure_endpoint = azure_endpoint
+        self.api_version = api_version
+        self.model = model
+        if self.api_key is None:
+            self.api_key = os.getenv("AZURE_OPENAI_API_KEY")
+            if self.api_key is None:
+                raise ValueError("No Azure OpenAI API key provided.")
+        if self.deployment_name is None:
+            # Fall back to the env var, defaulting to "text-embedding-ada-002",
+            # so a missing deployment name does not need to raise here.
+            self.deployment_name = os.getenv(
+                "AZURE_OPENAI_DEPLOYMENT_NAME", "text-embedding-ada-002"
+            )
+        if self.azure_endpoint is None:
+            self.azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
+            if self.azure_endpoint is None:
+                raise ValueError("No Azure OpenAI endpoint provided.")
+        if self.api_version is None:
+            self.api_version = os.getenv("AZURE_OPENAI_API_VERSION")
+            if self.api_version is None:
+                raise ValueError("No Azure OpenAI API version provided.")
+        if self.model is None:
+            self.model = os.getenv("AZURE_OPENAI_MODEL")
+            if self.model is None:
+                raise ValueError("No Azure OpenAI model provided.")
+        assert (
+            self.api_key is not None
+            and self.deployment_name is not None
+            and self.azure_endpoint is not None
+            and self.api_version is not None
+            and self.model is not None
+        )
+
+        try:
+            self.client = openai.AzureOpenAI(
+                azure_deployment=str(self.deployment_name),
+                api_key=str(self.api_key),
+                azure_endpoint=str(self.azure_endpoint),
+                api_version=str(self.api_version),
+                _strict_response_validation=True,
+            )
+        except Exception as e:
+            raise ValueError(f"OpenAI API client failed to initialize. Error: {e}")
+
+    def __call__(self, docs: list[str]) -> list[list[float]]:
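+        """Return one embedding vector per document in ``docs``.
+
+        The request is retried up to three times with exponential backoff;
+        a ValueError is raised if no embeddings are returned.
+        """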
+        if self.client is None:
+            raise ValueError("OpenAI client is not initialized.")
+        embeds = None
+        error_message = ""
+
+        # Retry up to three times, backing off exponentially between attempts
+        for j in range(3):
+            try:
+                embeds = self.client.embeddings.create(
+                    input=docs, model=str(self.model)
+                )
+                if embeds.data:
+                    break
+            except OpenAIError as e:
+                error_message = str(e)
+                logger.warning(f"Retrying in {2**j} seconds...")
+                sleep(2**j)
+            except Exception as e:
+                logger.error(f"Azure OpenAI API call failed. Error: {e}")
+                raise ValueError(f"Azure OpenAI API call failed. Error: {e}")
+
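+        # Bail out if the response is missing, malformed, or empty.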
+        if (
+            not embeds
+            or not isinstance(embeds, CreateEmbeddingResponse)
+            or not embeds.data
+        ):
+            raise ValueError(f"No embeddings returned. Error: {error_message}")
+
+        embeddings = [embeds_obj.embedding for embeds_obj in embeds.data]
+        return embeddings
diff --git a/tests/unit/encoders/test_azure.py b/tests/unit/encoders/test_azure.py
new file mode 100644
index 0000000000000000000000000000000000000000..93dffb89194606c2d410c1cf749cc57faf6d1327
--- /dev/null
+++ b/tests/unit/encoders/test_azure.py
@@ -0,0 +1,124 @@
+import pytest
+from openai import OpenAIError
+from openai.types import CreateEmbeddingResponse, Embedding
+from openai.types.create_embedding_response import Usage
+
+from semantic_router.encoders import AzureOpenAIEncoder
+
+
+@pytest.fixture
+def openai_encoder(mocker):
+    mocker.patch("openai.Client")
+    return AzureOpenAIEncoder(
+        api_key="test_api_key",
+        deployment_name="test-deployment",
+        azure_endpoint="test_endpoint",
+        api_version="test_version",
+        model="test_model",
+    )
+
+
+class TestAzureOpenAIEncoder:
+    def test_openai_encoder_init_success(self, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        encoder = AzureOpenAIEncoder()
+        assert encoder.client is not None
+
+    def test_openai_encoder_init_no_api_key(self, mocker):
+        mocker.patch("os.getenv", return_value=None)
+        with pytest.raises(ValueError) as _:
+            AzureOpenAIEncoder()
+
+    def test_openai_encoder_call_uninitialized_client(self, openai_encoder):
+        # Set the client to None to simulate an uninitialized client
+        openai_encoder.client = None
+        with pytest.raises(ValueError) as e:
+            openai_encoder(["test document"])
+        assert "OpenAI client is not initialized." in str(e.value)
+
+    def test_openai_encoder_init_exception(self, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-stuff")
+        mocker.patch(
+            "openai.AzureOpenAI", side_effect=Exception("Initialization error")
+        )
+        with pytest.raises(ValueError) as e:
+            AzureOpenAIEncoder()
+        assert (
+            "OpenAI API client failed to initialize. Error: Initialization error"
+            in str(e.value)
+        )
+
+    def test_openai_encoder_call_success(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+
+        mock_embedding = Embedding(index=0, object="embedding", embedding=[0.1, 0.2])
+        # Mock the CreateEmbeddingResponse object
+        mock_response = CreateEmbeddingResponse(
+            model="text-embedding-ada-002",
+            object="list",
+            usage=Usage(prompt_tokens=0, total_tokens=20),
+            data=[mock_embedding],
+        )
+
+        mocker.patch.object(
+            openai_encoder.client.embeddings, "create", return_value=mock_response
+        )
+        embeddings = openai_encoder(["test document"])
+        assert embeddings == [[0.1, 0.2]]
+
+    def test_openai_encoder_call_with_retries(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        mocker.patch("time.sleep", return_value=None)  # To speed up the test
+        mocker.patch.object(
+            openai_encoder.client.embeddings,
+            "create",
+            side_effect=OpenAIError("Test error"),
+        )
+        with pytest.raises(ValueError) as e:
+            openai_encoder(["test document"])
+        assert "No embeddings returned. Error" in str(e.value)
+
+    def test_openai_encoder_call_failure_non_openai_error(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        mocker.patch("time.sleep", return_value=None)  # To speed up the test
+        mocker.patch.object(
+            openai_encoder.client.embeddings,
+            "create",
+            side_effect=Exception("Non-OpenAIError"),
+        )
+        with pytest.raises(ValueError) as e:
+            openai_encoder(["test document"])
+
+        assert "OpenAI API call failed. Error: Non-OpenAIError" in str(e.value)
+
+    def test_openai_encoder_call_successful_retry(self, openai_encoder, mocker):
+        mocker.patch("os.getenv", return_value="fake-api-key")
+        # Patch the module-level sleep so the backoff doesn't slow the test down
+        mocker.patch("semantic_router.encoders.zure.sleep", return_value=None)
+
+        mock_embedding = Embedding(index=0, object="embedding", embedding=[0.1, 0.2])
+        # Mock the CreateEmbeddingResponse object
+        mock_response = CreateEmbeddingResponse(
+            model="text-embedding-ada-002",
+            object="list",
+            usage=Usage(prompt_tokens=0, total_tokens=20),
+            data=[mock_embedding],
+        )
+
+        # First call fails with an OpenAIError, the retry succeeds
+        responses = [OpenAIError("OpenAI error"), mock_response]
+        mocker.patch.object(
+            openai_encoder.client.embeddings, "create", side_effect=responses
+        )
+        embeddings = openai_encoder(["test document"])
+        assert embeddings == [[0.1, 0.2]]