Unverified commit 22857714, authored by James Briggs

lint

parent f16529f8
@@ -41,7 +41,7 @@ class TestBaseLLM:
     def test_base_llm_is_valid_inputs_invalid_false(self, base_llm):
         test_schema = {
             "name": "get_time",
-            "description": 'Finds the current time in a specific timezone.\n\n:param timezone: The timezone to find the current time in, should\n be a valid timezone from the IANA Time Zone Database like\n "America/New_York" or "Europe/London". Do NOT put the place\n name itself like "rome", or "new york", you must provide\n the IANA format.\n:type timezone: str\n:return: The current time in the specified timezone.'
+            "description": 'Finds the current time in a specific timezone.\n\n:param timezone: The timezone to find the current time in, should\n be a valid timezone from the IANA Time Zone Database like\n "America/New_York" or "Europe/London". Do NOT put the place\n name itself like "rome", or "new york", you must provide\n the IANA format.\n:type timezone: str\n:return: The current time in the specified timezone.',
         }
         test_inputs = {"timezone": "America/New_York"}

@@ -33,7 +33,11 @@ class TestLlamaCppLLM:
     def test_llamacpp_extract_function_inputs(self, llamacpp_llm, mocker):
         llamacpp_llm.llm.create_chat_completion = mocker.Mock(
-            return_value={"choices": [{"message": {"content": "{'timezone': 'America/New_York'}"}}]}
+            return_value={
+                "choices": [
+                    {"message": {"content": "{'timezone': 'America/New_York'}"}}
+                ]
+            }
         )
         test_schema = {
             "name": "get_time",
@@ -50,7 +54,11 @@ class TestLlamaCppLLM:
     def test_llamacpp_extract_function_inputs_invalid(self, llamacpp_llm, mocker):
         with pytest.raises(ValueError):
             llamacpp_llm.llm.create_chat_completion = mocker.Mock(
-                return_value={"choices": [{"message": {"content": "{'time': 'America/New_York'}"}}]}
+                return_value={
+                    "choices": [
+                        {"message": {"content": "{'time': 'America/New_York'}"}}
+                    ]
+                }
             )
             test_schema = {
                 "name": "get_time",
@@ -62,4 +70,4 @@ class TestLlamaCppLLM:
             llamacpp_llm.extract_function_inputs(
                 query=test_query, function_schema=test_schema
-            )
\ No newline at end of file
+            )