Skip to content
Snippets Groups Projects
Unverified Commit 70d4a5cc authored by Derik K's avatar Derik K Committed by GitHub
Browse files

Only firefunction is function calling (#11363)

parent 3a102350
No related branches found
No related tags found
No related merge requests found
......@@ -7,6 +7,7 @@ from llama_index.core.base.llms.generic_utils import get_from_param_or_env
from llama_index.core.types import BaseOutputParser, PydanticProgramMode
from llama_index.llms.fireworks.utils import (
fireworks_modelname_to_contextsize,
is_function_calling_model,
)
from llama_index.llms.openai import OpenAI
......@@ -64,7 +65,9 @@ class Fireworks(OpenAI):
num_output=self.max_tokens,
is_chat_model=True,
model_name=self.model,
is_function_calling_model=True,
is_function_calling_model=is_function_calling_model(
model=self._get_model_name()
),
)
@property
......
......@@ -63,6 +63,10 @@ def fireworks_modelname_to_contextsize(modelname: str) -> int:
return context_size
def is_function_calling_model(model: str) -> bool:
    """Return True if the given Fireworks model name supports function calling.

    Per the accompanying change, only the "firefunction" family of models
    offers function calling, so membership of the substring "function" in
    the model name is used as the signal.
    """
    return model.find("function") >= 0
def _message_to_fireworks_prompt(message: ChatMessage) -> Dict[str, Any]:
if message.role == MessageRole.USER:
prompt = {"role": "user", "content": message.content}
......
......@@ -26,7 +26,7 @@ description = "llama-index llms fireworks integration"
license = "MIT"
name = "llama-index-llms-fireworks"
readme = "README.md"
version = "0.1.2"
version = "0.1.3"
[tool.poetry.dependencies]
python = ">=3.8.1,<3.12"
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment