diff --git a/semantic_router/index/pinecone.py b/semantic_router/index/pinecone.py
index 9042eafe4ad4b9360b60120030c92107fdeb4f2e..5199421b799bb573770cf32c35621e1ac761549a 100644
--- a/semantic_router/index/pinecone.py
+++ b/semantic_router/index/pinecone.py
@@ -797,7 +797,6 @@ class PineconeIndex(BaseIndex):
         return self.index.describe_index_stats()["total_vector_count"]
 
 
-
 def parse_route_info(metadata: List[Dict[str, Any]]) -> List[Tuple]:
     """Parses metadata from Pinecone index to extract route, utterance, function
     schema and additional metadata.
@@ -814,12 +813,14 @@
         sr_function_schema = json.loads(record.get("sr_function_schema", "{}"))
         if sr_function_schema == {}:
             sr_function_schema = None
-
+
         additional_metadata = {
             key: value
             for key, value in record.items()
             if key not in ["sr_route", "sr_utterance", "sr_function_schema"]
         }
         # TODO: Not a fan of tuple packing here
-        route_info.append((sr_route, sr_utterance, sr_function_schema, additional_metadata))
+        route_info.append(
+            (sr_route, sr_utterance, sr_function_schema, additional_metadata)
+        )
     return route_info
diff --git a/semantic_router/layer.py b/semantic_router/layer.py
index 5991c4686cfefe4a014489b902cd63a4f366a0f3..4217945959d8966d2c990a9eace58ed0d70c7acc 100644
--- a/semantic_router/layer.py
+++ b/semantic_router/layer.py
@@ -219,7 +219,6 @@ class RouteLayer:
         # if routes list has been passed, we initialize index now
         if self.index.sync:
             # initialize index now
-            logger.info(f"JB TEMP: {self.routes=}")
             if len(self.routes) > 0:
                 self._add_and_sync_routes(routes=self.routes)
             else:
@@ -545,7 +544,6 @@ class RouteLayer:
         )
 
         # Update local route layer state
-        logger.info([data.get("function_schemas", None) for _, data in layer_routes_dict.items()])
        self.routes = []
         for route, data in layer_routes_dict.items():
             function_schemas = data.get("function_schemas", None)
diff --git a/semantic_router/llms/openai.py b/semantic_router/llms/openai.py
index 4c49974a52a78f73c9e099cee4cd048bf4fd3cd8..dfff80968a588cc1dbf387fc049219b64718f9ec 100644
--- a/semantic_router/llms/openai.py
+++ b/semantic_router/llms/openai.py
@@ -94,9 +94,6 @@ class OpenAILLM(BaseLLM):
         tools: Union[List[Dict[str, Any]], NotGiven] = (
             function_schemas if function_schemas else NOT_GIVEN
         )
-        logger.info(f"{function_schemas=}")
-        logger.info(f"{function_schemas is None=}")
-        logger.info(f"{tools=}")
 
         completion = self.client.chat.completions.create(
             model=self.name,
@@ -187,8 +184,6 @@
             raise Exception("No output generated for extract function input")
         output = output.replace("'", '"')
         function_inputs = json.loads(output)
-        logger.info(f"Function inputs: {function_inputs}")
-        logger.info(f"function_schemas: {function_schemas}")
         if not self._is_valid_inputs(function_inputs, function_schemas):
             raise ValueError("Invalid inputs")
         return function_inputs
@@ -206,7 +201,6 @@
             raise Exception("No output generated for extract function input")
         output = output.replace("'", '"')
         function_inputs = json.loads(output)
-        logger.info(f"OpenAI => Function Inputs: {function_inputs}")
         if not self._is_valid_inputs(function_inputs, function_schemas):
             raise ValueError("Invalid inputs")
         return function_inputs