From 82c06d519c4733b3c3a1ce4a627dfdfe2e3b53ed Mon Sep 17 00:00:00 2001
From: Smart2332 <omribenshoham2@gmail.com>
Date: Sun, 26 May 2024 17:08:44 +0300
Subject: [PATCH] Apply the embedding dimensions change to encoders/zure.py and restore AzureOpenAILLM in llms/zure.py

An earlier commit added the embedding `dimensions` parameter to the wrong
file, overwriting semantic_router/llms/zure.py with the encoder
implementation. This commit applies the change to
semantic_router/encoders/zure.py and restores the original AzureOpenAILLM
class in llms/zure.py.

---
 semantic_router/encoders/zure.py |   9 +-
 semantic_router/llms/zure.py     | 139 +++++++++----------------------
 2 files changed, 47 insertions(+), 101 deletions(-)

diff --git a/semantic_router/encoders/zure.py b/semantic_router/encoders/zure.py
index df2bf858..7215e7f5 100644
--- a/semantic_router/encoders/zure.py
+++ b/semantic_router/encoders/zure.py
@@ -1,8 +1,9 @@
 import os
 from time import sleep
-from typing import List, Optional
+from typing import List, Optional, Union
 
 import openai
+from openai._types import NotGiven
 from openai import OpenAIError
 from openai.types import CreateEmbeddingResponse
 
@@ -13,6 +14,7 @@ from semantic_router.utils.logger import logger
 
 class AzureOpenAIEncoder(BaseEncoder):
     client: Optional[openai.AzureOpenAI] = None
+    dimensions: Union[int, NotGiven] = NotGiven()
     type: str = "azure"
     api_key: Optional[str] = None
     deployment_name: Optional[str] = None
@@ -28,6 +30,7 @@ class AzureOpenAIEncoder(BaseEncoder):
         api_version: Optional[str] = None,
         model: Optional[str] = None,  # TODO we should change to `name` JB
         score_threshold: float = 0.82,
+        dimensions: Union[int, NotGiven] = NotGiven(),
     ):
         name = deployment_name
         if name is None:
@@ -38,6 +41,8 @@ class AzureOpenAIEncoder(BaseEncoder):
         self.azure_endpoint = azure_endpoint
         self.api_version = api_version
         self.model = model
+        # set dimensions to support openai embed 3 dimensions param
+        self.dimensions = dimensions
         if self.api_key is None:
             self.api_key = os.getenv("AZURE_OPENAI_API_KEY")
             if self.api_key is None:
@@ -89,7 +94,7 @@ class AzureOpenAIEncoder(BaseEncoder):
         for j in range(3):
             try:
                 embeds = self.client.embeddings.create(
-                    input=docs, model=str(self.model)
+                    input=docs, model=str(self.model), dimensions=self.dimensions,
                 )
                 if embeds.data:
                     break
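
For reference, a minimal usage sketch of the encoder after this change. It is
illustrative rather than part of the patch: the endpoint, deployment name, and
the 256 value are made up, and it assumes AzureOpenAIEncoder is re-exported
from the semantic_router.encoders package. The `dimensions` argument is only
honoured by the text-embedding-3 model family; left unset, the NotGiven()
default marks it as "not supplied" to the OpenAI client, so older embedding
models keep working unchanged.

import os

from semantic_router.encoders import AzureOpenAIEncoder

encoder = AzureOpenAIEncoder(
    api_key=os.environ["AZURE_OPENAI_API_KEY"],
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    api_version="2023-07-01-preview",
    model="text-embedding-3-large",   # embed-3 models accept a requested size
    deployment_name="embed-3-large",  # hypothetical deployment name
    dimensions=256,                   # forwarded to client.embeddings.create(...)
)

# Omit `dimensions` for models that do not support it; the NotGiven() default
# keeps the parameter out of the request.
vectors = encoder(["semantic routing", "vector search"])
print(len(vectors), len(vectors[0]))  # 2 documents, 256 floats each if honoured
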
diff --git a/semantic_router/llms/zure.py b/semantic_router/llms/zure.py
index 7215e7f5..26b7901f 100644
--- a/semantic_router/llms/zure.py
+++ b/semantic_router/llms/zure.py
@@ -1,121 +1,62 @@
 import os
-from time import sleep
-from typing import List, Optional, Union
+from typing import List, Optional
 
 import openai
-from openai._types import NotGiven
-from openai import OpenAIError
-from openai.types import CreateEmbeddingResponse
 
-from semantic_router.encoders import BaseEncoder
+from semantic_router.llms import BaseLLM
+from semantic_router.schema import Message
 from semantic_router.utils.defaults import EncoderDefault
 from semantic_router.utils.logger import logger
 
 
-class AzureOpenAIEncoder(BaseEncoder):
-    client: Optional[openai.AzureOpenAI] = None
-    dimensions: Union[int, NotGiven] = NotGiven()
-    type: str = "azure"
-    api_key: Optional[str] = None
-    deployment_name: Optional[str] = None
-    azure_endpoint: Optional[str] = None
-    api_version: Optional[str] = None
-    model: Optional[str] = None
+class AzureOpenAILLM(BaseLLM):
+    client: Optional[openai.AzureOpenAI]
+    temperature: Optional[float]
+    max_tokens: Optional[int]
 
     def __init__(
         self,
-        api_key: Optional[str] = None,
-        deployment_name: Optional[str] = None,
+        name: Optional[str] = None,
+        openai_api_key: Optional[str] = None,
         azure_endpoint: Optional[str] = None,
-        api_version: Optional[str] = None,
-        model: Optional[str] = None,  # TODO we should change to `name` JB
-        score_threshold: float = 0.82,
-        dimensions: Union[int, NotGiven] = NotGiven(),
+        temperature: float = 0.01,
+        max_tokens: int = 200,
+        api_version="2023-07-01-preview",
     ):
-        name = deployment_name
         if name is None:
-            name = EncoderDefault.AZURE.value["embedding_model"]
-        super().__init__(name=name, score_threshold=score_threshold)
-        self.api_key = api_key
-        self.deployment_name = deployment_name
-        self.azure_endpoint = azure_endpoint
-        self.api_version = api_version
-        self.model = model
-        # set dimensions to support openai embed 3 dimensions param
-        self.dimensions = dimensions
-        if self.api_key is None:
-            self.api_key = os.getenv("AZURE_OPENAI_API_KEY")
-            if self.api_key is None:
-                raise ValueError("No Azure OpenAI API key provided.")
-        if self.deployment_name is None:
-            self.deployment_name = EncoderDefault.AZURE.value["deployment_name"]
-        # deployment_name may still be None, but it is optional in the API
-        if self.azure_endpoint is None:
-            self.azure_endpoint = os.getenv("AZURE_OPENAI_ENDPOINT")
-            if self.azure_endpoint is None:
-                raise ValueError("No Azure OpenAI endpoint provided.")
-        if self.api_version is None:
-            self.api_version = os.getenv("AZURE_OPENAI_API_VERSION")
-            if self.api_version is None:
-                raise ValueError("No Azure OpenAI API version provided.")
-        if self.model is None:
-            self.model = os.getenv("AZURE_OPENAI_MODEL")
-            if self.model is None:
-                raise ValueError("No Azure OpenAI model provided.")
-        assert (
-            self.api_key is not None
-            and self.azure_endpoint is not None
-            and self.api_version is not None
-            and self.model is not None
-        )
-
+            name = EncoderDefault.AZURE.value["language_model"]
+        super().__init__(name=name)
+        api_key = openai_api_key or os.getenv("AZURE_OPENAI_API_KEY")
+        if api_key is None:
+            raise ValueError("AzureOpenAI API key cannot be 'None'.")
+        azure_endpoint = azure_endpoint or os.getenv("AZURE_OPENAI_ENDPOINT")
+        if azure_endpoint is None:
+            raise ValueError("Azure endpoint cannot be 'None'.")
         try:
             self.client = openai.AzureOpenAI(
-                azure_deployment=(
-                    str(self.deployment_name) if self.deployment_name else None
-                ),
-                api_key=str(self.api_key),
-                azure_endpoint=str(self.azure_endpoint),
-                api_version=str(self.api_version),
-                # _strict_response_validation=True,
+                api_key=api_key, azure_endpoint=azure_endpoint, api_version=api_version
             )
         except Exception as e:
-            raise ValueError(
-                f"OpenAI API client failed to initialize. Error: {e}"
-            ) from e
+            raise ValueError(f"AzureOpenAI API client failed to initialize. Error: {e}")
+        self.temperature = temperature
+        self.max_tokens = max_tokens
 
-    def __call__(self, docs: List[str]) -> List[List[float]]:
+    def __call__(self, messages: List[Message]) -> str:
         if self.client is None:
-            raise ValueError("OpenAI client is not initialized.")
-        embeds = None
-        error_message = ""
-
-        # Exponential backoff
-        for j in range(3):
-            try:
-                embeds = self.client.embeddings.create(
-                    input=docs, model=str(self.model), dimensions=self.dimensions,
-                )
-                if embeds.data:
-                    break
-            except OpenAIError as e:
-                # print full traceback
-                import traceback
-
-                traceback.print_exc()
-                sleep(2**j)
-                error_message = str(e)
-                logger.warning(f"Retrying in {2**j} seconds...")
-            except Exception as e:
-                logger.error(f"Azure OpenAI API call failed. Error: {error_message}")
-                raise ValueError(f"Azure OpenAI API call failed. Error: {e}") from e
+            raise ValueError("AzureOpenAI client is not initialized.")
+        try:
+            completion = self.client.chat.completions.create(
+                model=self.name,
+                messages=[m.to_openai() for m in messages],
+                temperature=self.temperature,
+                max_tokens=self.max_tokens,
+            )
 
-        if (
-            not embeds
-            or not isinstance(embeds, CreateEmbeddingResponse)
-            or not embeds.data
-        ):
-            raise ValueError(f"No embeddings returned. Error: {error_message}")
+            output = completion.choices[0].message.content
 
-        embeddings = [embeds_obj.embedding for embeds_obj in embeds.data]
-        return embeddings
+            if not output:
+                raise Exception("No output generated")
+            return output
+        except Exception as e:
+            logger.error(f"LLM error: {e}")
+            raise Exception(f"LLM error: {e}") from e
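
A corresponding sketch for the restored AzureOpenAILLM, again illustrative and
not part of the patch: the deployment name is hypothetical and the class is
assumed to be exported from semantic_router.llms. For Azure, the `name` given
here is the chat deployment, which __call__ passes as the `model` argument to
chat.completions.create.

import os

from semantic_router.llms import AzureOpenAILLM
from semantic_router.schema import Message

llm = AzureOpenAILLM(
    name="gpt-35-turbo",  # hypothetical Azure chat deployment
    openai_api_key=os.environ["AZURE_OPENAI_API_KEY"],
    azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
    temperature=0.01,
    max_tokens=200,
)

reply = llm(
    [
        Message(role="system", content="You are a terse assistant."),
        Message(role="user", content="What does a semantic router do?"),
    ]
)
print(reply)
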
-- 
GitLab