Commit 23426e00 authored by Taylor

refactor: update AzureOpenAIEncoder tests to use new module name and parameters

parent 742a5221
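For readers skimming the change: the `unittest.mock.patch` target follows the module rename (`zure` → `azure_openai`), and the fixture gains `http_client_options` and `dimensions` arguments. Below is a minimal sketch of the updated setup, assuming `semantic_router` is installed and that `AzureOpenAIEncoder` is importable from `semantic_router.encoders` (the import line is not shown in this diff); the standalone test only demonstrates the patch-target rule and makes no Azure calls.

from unittest.mock import patch

import pytest

from semantic_router.encoders import AzureOpenAIEncoder


@pytest.fixture
def openai_encoder():
    # Mirrors the updated fixture: endpoint-shaped URL, API version, the new
    # http_client_options argument, and an explicit dimensions value.
    return AzureOpenAIEncoder(
        azure_endpoint="https://test-endpoint.openai.azure.com",
        api_version="test-version",
        api_key="test_api_key",
        http_client_options={"timeout": 10},
        deployment_name="test-deployment",
        dimensions=1536,
        max_retries=2,
    )


def test_sleep_is_patched_in_the_renamed_module():
    # patch() must name the module where `sleep` is looked up at call time,
    # so the target moves from semantic_router.encoders.zure.sleep to
    # semantic_router.encoders.azure_openai.sleep along with the rename.
    with patch(
        "semantic_router.encoders.azure_openai.sleep", return_value=None
    ) as mock_sleep:
        from semantic_router.encoders import azure_openai

        azure_openai.sleep(1)  # resolves to the patched mock, no real delay
        mock_sleep.assert_called_once_with(1)

The same rule applies to the async helper: in the diff below the tests now patch semantic_router.encoders.azure_openai.asleep rather than the old zure path.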
@@ -23,11 +23,12 @@ def mock_openai_async_client():
 @pytest.fixture
 def openai_encoder(mock_openai_client, mock_openai_async_client):
     return AzureOpenAIEncoder(
+        azure_endpoint="https://test-endpoint.openai.azure.com",
+        api_version="test-version",
         api_key="test_api_key",
+        http_client_options={"timeout": 10},
         deployment_name="test-deployment",
-        azure_endpoint="test_endpoint",
-        api_version="test_version",
-        model="test_model",
+        dimensions=1536,
         max_retries=2,
     )
@@ -84,7 +85,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch.object(
             openai_encoder.client.embeddings, "create", side_effect=responses
         )
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             embeddings = openai_encoder(["test document"])
             assert embeddings == [[0.1, 0.2]]
@@ -96,7 +97,7 @@ class TestAzureOpenAIEncoder:
             "create",
             side_effect=Exception("Non-OpenAIError"),
         )
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             with pytest.raises(ValueError) as e:
                 openai_encoder(["test document"])
@@ -124,7 +125,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch.object(
             openai_encoder.client.embeddings, "create", side_effect=responses
         )
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             embeddings = openai_encoder(["test document"])
             assert embeddings == [[0.1, 0.2]]
@@ -150,7 +151,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("time.sleep", return_value=None)  # To speed up the test
         # Patch the sleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             result = openai_encoder(["test document"])
             assert result == [[0.1, 0.2, 0.3]]
@@ -176,7 +177,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("time.sleep", return_value=None)  # To speed up the test
         # Patch the sleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.sleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.sleep", return_value=None):
             with pytest.raises(OpenAIError):
                 openai_encoder(["test document"])
@@ -207,7 +208,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("asyncio.sleep", return_value=None)  # To speed up the test
         # Patch the asleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.asleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.asleep", return_value=None):
             result = await openai_encoder.acall(["test document"])
             assert result == [[0.1, 0.2, 0.3]]
@@ -226,7 +227,7 @@ class TestAzureOpenAIEncoder:
         mocker.patch("asyncio.sleep", return_value=None)  # To speed up the test
         # Patch the asleep function in the encoder module to avoid actual sleep
-        with patch("semantic_router.encoders.zure.asleep", return_value=None):
+        with patch("semantic_router.encoders.azure_openai.asleep", return_value=None):
             with pytest.raises(OpenAIError):
                 await openai_encoder.acall(["test document"])