diff --git a/tests/unit/llms/test_llm_base.py b/tests/unit/llms/test_llm_base.py
index 076b2fc584e8f71de586faa2b8df0a88c3db6fa3..2208928a107575e8e5bc5306fc92b534713365f8 100644
--- a/tests/unit/llms/test_llm_base.py
+++ b/tests/unit/llms/test_llm_base.py
@@ -41,7 +41,7 @@ class TestBaseLLM:
     def test_base_llm_is_valid_inputs_invalid_false(self, base_llm):
         test_schema = {
             "name": "get_time",
-            "description": 'Finds the current time in a specific timezone.\n\n:param timezone: The timezone to find the current time in, should\n    be a valid timezone from the IANA Time Zone Database like\n    "America/New_York" or "Europe/London". Do NOT put the place\n    name itself like "rome", or "new york", you must provide\n    the IANA format.\n:type timezone: str\n:return: The current time in the specified timezone.'
+            "description": 'Finds the current time in a specific timezone.\n\n:param timezone: The timezone to find the current time in, should\n    be a valid timezone from the IANA Time Zone Database like\n    "America/New_York" or "Europe/London". Do NOT put the place\n    name itself like "rome", or "new york", you must provide\n    the IANA format.\n:type timezone: str\n:return: The current time in the specified timezone.',
         }

         test_inputs = {"timezone": "America/New_York"}
diff --git a/tests/unit/llms/test_llm_llamacpp.py b/tests/unit/llms/test_llm_llamacpp.py
index 1344dda0cb9b2e3aea507f69c4aafaafd8df56f8..5793c2d2f1b008ccc2a5fe3b183e4698f20dee9c 100644
--- a/tests/unit/llms/test_llm_llamacpp.py
+++ b/tests/unit/llms/test_llm_llamacpp.py
@@ -33,7 +33,11 @@ class TestLlamaCppLLM:

     def test_llamacpp_extract_function_inputs(self, llamacpp_llm, mocker):
         llamacpp_llm.llm.create_chat_completion = mocker.Mock(
-            return_value={"choices": [{"message": {"content": "{'timezone': 'America/New_York'}"}}]}
+            return_value={
+                "choices": [
+                    {"message": {"content": "{'timezone': 'America/New_York'}"}}
+                ]
+            }
         )
         test_schema = {
             "name": "get_time",
@@ -50,7 +54,11 @@ class TestLlamaCppLLM:
     def test_llamacpp_extract_function_inputs_invalid(self, llamacpp_llm, mocker):
         with pytest.raises(ValueError):
             llamacpp_llm.llm.create_chat_completion = mocker.Mock(
-                return_value={"choices": [{"message": {"content": "{'time': 'America/New_York'}"}}]}
+                return_value={
+                    "choices": [
+                        {"message": {"content": "{'time': 'America/New_York'}"}}
+                    ]
+                }
             )
             test_schema = {
                 "name": "get_time",
@@ -62,4 +70,4 @@ class TestLlamaCppLLM:

             llamacpp_llm.extract_function_inputs(
                 query=test_query, function_schema=test_schema
-            )
\ No newline at end of file
+            )