diff --git a/.changeset/shy-bulldogs-wait.md b/.changeset/shy-bulldogs-wait.md
new file mode 100644
index 0000000000000000000000000000000000000000..1da10248bd7cd1c9bbc0fcd18bd20165c7465706
--- /dev/null
+++ b/.changeset/shy-bulldogs-wait.md
@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Fix: use generic LLMAgent instead of OpenAIAgent (adds support for Gemini and Anthropic for Agentic RAG)
diff --git a/templates/components/engines/typescript/agent/chat.ts b/templates/components/engines/typescript/agent/chat.ts
index bf2bb69b16ea29b3fc102c90ea08379c40272381..333ff640e8bc183a64d21172370d89c513f14e43 100644
--- a/templates/components/engines/typescript/agent/chat.ts
+++ b/templates/components/engines/typescript/agent/chat.ts
@@ -1,7 +1,7 @@
 import {
   BaseChatEngine,
   BaseToolWithCall,
-  OpenAIAgent,
+  LLMAgent,
   QueryEngineTool,
 } from "llamaindex";
 import fs from "node:fs/promises";
@@ -42,7 +42,7 @@ export async function createChatEngine(documentIds?: string[], params?: any) {
     tools.push(...(await createTools(toolConfig)));
   }
 
-  const agent = new OpenAIAgent({
+  const agent = new LLMAgent({
     tools,
     systemPrompt: process.env.SYSTEM_PROMPT,
   }) as unknown as BaseChatEngine;
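
A minimal sketch (not part of the patch) of why swapping OpenAIAgent for the generic LLMAgent enables other providers: LLMAgent resolves its model from Settings.llm when no llm is passed, so the agent in chat.ts follows whatever tool-calling provider the generated project configures. The Anthropic import and the model name below are illustrative assumptions; the real provider setup lives elsewhere in the create-llama template.

```ts
import { Anthropic, LLMAgent, Settings } from "llamaindex";

// Configure a non-OpenAI, tool-calling LLM once at startup (assumed model name).
Settings.llm = new Anthropic({ model: "claude-3-5-sonnet" });

// The agent now picks up Settings.llm automatically, no OpenAI-specific class needed.
const agent = new LLMAgent({
  tools: [],
  systemPrompt: process.env.SYSTEM_PROMPT,
});
```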