diff --git a/tests/unit/encoders/test_azure.py b/tests/unit/encoders/test_azure.py
index 01a495a44ec1f4e58f9ee0a0c45f211d62226d76..15554805e5b2891d72a15fce39cecd0c8f2f9299 100644
--- a/tests/unit/encoders/test_azure.py
+++ b/tests/unit/encoders/test_azure.py
@@ -23,11 +23,12 @@ def mock_openai_async_client():
 @pytest.fixture
 def openai_encoder(mock_openai_client, mock_openai_async_client):
     return AzureOpenAIEncoder(
+        azure_endpoint="https://test-endpoint.openai.azure.com",
+        api_version="test-version",
         api_key="test_api_key",
+        http_client_options={"timeout": 10},
         deployment_name="test-deployment",
-        azure_endpoint="test_endpoint",
-        api_version="test_version",
-        model="test_model",
+        dimensions=1536,
         max_retries=2,
     )
 
@@ -84,7 +85,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch.object(
             openai_encoder.client.embeddings, "create", side_effect=responses
         )
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             embeddings = openai_encoder(["test document"])
         assert embeddings == [[0.1, 0.2]]
 
@@ -96,7 +97,7 @@ class TestAzureOpenAIEncoder:
             "create",
             side_effect=Exception("Non-OpenAIError"),
         )
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             with pytest.raises(ValueError) as e:
                 openai_encoder(["test document"])
 
@@ -124,7 +125,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch.object(
             openai_encoder.client.embeddings, "create", side_effect=responses
         )
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             embeddings = openai_encoder(["test document"])
         assert embeddings == [[0.1, 0.2]]
 
@@ -150,7 +151,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("time.sleep", return_value=None)  # To speed up the test
 
         # Patch the sleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             result = openai_encoder(["test document"])
 
         assert result == [[0.1, 0.2, 0.3]]
@@ -176,7 +177,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("time.sleep", return_value=None)  # To speed up the test
 
         # Patch the sleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             with pytest.raises(OpenAIError):
                 openai_encoder(["test document"])
 
@@ -207,7 +208,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("asyncio.sleep", return_value=None)  # To speed up the test
 
         # Patch the asleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.asleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.asleep", return_value=None):
             result = await openai_encoder.acall(["test document"])
 
         assert result == [[0.1, 0.2, 0.3]]
@@ -226,7 +227,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("asyncio.sleep", return_value=None)  # To speed up the test
 
         # Patch the asleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.asleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.asleep", return_value=None):
             with pytest.raises(OpenAIError):
                 await openai_encoder.acall(["test document"])
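
Not part of the patch: a minimal standalone sketch of the pattern the updated tests depend on, i.e. patching the retry sleep where it is looked up in the renamed semantic_router.encoders.azure_openai module. The top-level import path and running without the mocked-client fixtures are assumptions; the constructor arguments are copied from the fixture above.

    # Hypothetical usage sketch, mirroring the fixture arguments in this patch.
    from unittest.mock import patch

    from semantic_router.encoders import AzureOpenAIEncoder  # assumed public import path

    encoder = AzureOpenAIEncoder(
        azure_endpoint="https://test-endpoint.openai.azure.com",
        api_version="test-version",
        api_key="test_api_key",
        http_client_options={"timeout": 10},
        deployment_name="test-deployment",
        dimensions=1536,
        max_retries=2,
    )

    # The patch target must name the module where `sleep` (or `asleep` for the
    # async path) is resolved at call time -- the renamed
    # semantic_router.encoders.azure_openai -- rather than time.sleep itself,
    # otherwise the retry back-off still runs at full length.
    with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
        embeddings = encoder(["test document"])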