diff --git a/.changeset/real-falcons-fix.md b/.changeset/real-falcons-fix.md
new file mode 100644
index 0000000000000000000000000000000000000000..194f4ff1b6b44144af7457f98970d685c2a2b587
--- /dev/null
+++ b/.changeset/real-falcons-fix.md
@@ -0,0 +1,20 @@
+---
+"@llamaindex/huggingface": patch
+"@llamaindex/portkey-ai": patch
+"@llamaindex/anthropic": patch
+"@llamaindex/deepinfra": patch
+"@llamaindex/fireworks": patch
+"@llamaindex/replicate": patch
+"@llamaindex/deepseek": patch
+"@llamaindex/together": patch
+"@llamaindex/mistral": patch
+"@llamaindex/google": patch
+"@llamaindex/ollama": patch
+"@llamaindex/openai": patch
+"@llamaindex/vercel": patch
+"@llamaindex/groq": patch
+"@llamaindex/vllm": patch
+"@llamaindex/examples": patch
+---
+
+Add a convenience factory function for each LLM provider, e.g. you can use openai instead of 'new OpenAI'
diff --git a/.changeset/sour-rats-complain.md b/.changeset/sour-rats-complain.md
index 86597ed2500306aaea53a10738e093de5b731715..0fa483b7278624204137cc6f5b888edb8789c25c 100644
--- a/.changeset/sour-rats-complain.md
+++ b/.changeset/sour-rats-complain.md
@@ -4,4 +4,4 @@
 "@llamaindex/core": patch
 ---
 
-Add factory methods agent and multiAgent to simplify agent usage
+Add factory methods tool, agent and multiAgent to simplify agent usage
diff --git a/examples/agentworkflow/blog-writer.ts b/examples/agentworkflow/blog-writer.ts
index 282baa64854666af972c73cb053bf9df89dc7163..d72cfd778a9c395944f496733007d4d290ec8a31 100644
--- a/examples/agentworkflow/blog-writer.ts
+++ b/examples/agentworkflow/blog-writer.ts
@@ -1,4 +1,4 @@
-import { OpenAI } from "@llamaindex/openai";
+import { openai } from "@llamaindex/openai";
 import fs from "fs";
 import {
   agent,
@@ -11,7 +11,7 @@ import os from "os";
 import { z } from "zod";
 import { WikipediaTool } from "../wiki";
 
-const llm = new OpenAI({
+const llm = openai({
   model: "gpt-4o-mini",
 });
 
diff --git a/examples/agentworkflow/multiple-agents.ts b/examples/agentworkflow/multiple-agents.ts
index e65b353fed4d4074111d42bf4286dda4b0ab79d5..6ca95b3ec6fe92ebc9f10149a6096e2b03623f6b 100644
--- a/examples/agentworkflow/multiple-agents.ts
+++ b/examples/agentworkflow/multiple-agents.ts
@@ -3,7 +3,7 @@
  * 1. FetchWeatherAgent - Fetches the weather in a city
 * 2. TemperatureConverterAgent - Converts the temperature from Fahrenheit to Celsius
 */
-import { OpenAI } from "@llamaindex/openai";
+import { openai } from "@llamaindex/openai";
 import {
   agent,
   AgentInput,
@@ -17,7 +17,7 @@
 } from "llamaindex";
 import { z } from "zod";
 
-const llm = new OpenAI({
+const llm = openai({
   model: "gpt-4o-mini",
 });
 
diff --git a/examples/agentworkflow/single-agent.ts b/examples/agentworkflow/single-agent.ts
index dc12d877c84f2430eea11b773b1081c39dbc9fd3..ea94e44a11137e337cd1adcd55666f1712f7b2eb 100644
--- a/examples/agentworkflow/single-agent.ts
+++ b/examples/agentworkflow/single-agent.ts
@@ -1,11 +1,11 @@
 /**
  * This example shows how to use AgentWorkflow as a single agent with tools
  */
-import { OpenAI } from "@llamaindex/openai";
+import { openai } from "@llamaindex/openai";
 import { Settings, agent } from "llamaindex";
 import { getWeatherTool } from "../agent/utils/tools";
 
-Settings.llm = new OpenAI({
+Settings.llm = openai({
   model: "gpt-4o",
 });
 
diff --git a/examples/agentworkflow/with-anthropic.ts b/examples/agentworkflow/with-anthropic.ts
index 1895c1b53283b1ec6b3e6ccbae6913d24ac5be9a..5039aa3f639174cbfda575e2e739f4c874e213e3 100644
--- a/examples/agentworkflow/with-anthropic.ts
+++ b/examples/agentworkflow/with-anthropic.ts
@@ -8,11 +8,7 @@
 } from "llamaindex";
 import { z } from "zod";
 
-import { Anthropic } from "@llamaindex/anthropic";
-
-const llm = new Anthropic({
-  model: "claude-3-5-sonnet",
-});
+import { anthropic } from "@llamaindex/anthropic";
 
 const weatherTool = tool({
   name: "weather",
@@ -57,6 +53,10 @@
 });
 
 async function main() {
+  const llm = anthropic({
+    model: "claude-3-5-sonnet",
+  });
+
   const reportAgent = agent({
     name: "ReportAgent",
     description:
diff --git a/packages/providers/anthropic/src/index.ts b/packages/providers/anthropic/src/index.ts
index 26ad799d59b25795f88a2ef4e8a530759a256ce8..d54004233086ba740142737630a4a53dfe55e10f 100644
--- a/packages/providers/anthropic/src/index.ts
+++ b/packages/providers/anthropic/src/index.ts
@@ -1,14 +1,2 @@
-export {
-  AnthropicAgent,
-  AnthropicAgentWorker,
-  type AnthropicAgentParams,
-} from "./agent";
-export {
-  ALL_AVAILABLE_ANTHROPIC_LEGACY_MODELS,
-  ALL_AVAILABLE_ANTHROPIC_MODELS,
-  ALL_AVAILABLE_V3_5_MODELS,
-  ALL_AVAILABLE_V3_MODELS,
-  Anthropic,
-  AnthropicSession,
-  type AnthropicAdditionalChatOptions,
-} from "./llm";
+export * from "./agent";
+export * from "./llm";
diff --git a/packages/providers/anthropic/src/llm.ts b/packages/providers/anthropic/src/llm.ts
index 7cbe9d6da62df3f2c35753a175fee75b416d0c2b..4637d289a5c207e1ff81e63348a2bacb8a8d3966 100644
--- a/packages/providers/anthropic/src/llm.ts
+++ b/packages/providers/anthropic/src/llm.ts
@@ -60,7 +60,7 @@ const defaultAnthropicSession: {
  * @param options
  * @returns
  */
-export function getAnthropicSession(options: ClientOptions = {}) {
+function getAnthropicSession(options: ClientOptions = {}) {
   let session = defaultAnthropicSession.find((session) => {
     return isDeepEqual(session.options, options);
   })?.session;
@@ -586,3 +586,11 @@ export class Anthropic extends ToolCallLLM<
     };
   }
 }
+
+/**
+ * Convenience function to create a new Anthropic instance.
+ * @param init - Optional initialization parameters for the Anthropic instance.
+ * @returns A new Anthropic instance.
+ */
+export const anthropic = (init?: ConstructorParameters<typeof Anthropic>[0]) =>
+  new Anthropic(init);
diff --git a/packages/providers/deepinfra/src/index.ts b/packages/providers/deepinfra/src/index.ts
index 1002d8ec26a59e0ff899b97a9d3dd51ee3eb64f6..450d1167ac7be6ad8b96d85ef52be82fb176de18 100644
--- a/packages/providers/deepinfra/src/index.ts
+++ b/packages/providers/deepinfra/src/index.ts
@@ -3,4 +3,4 @@ export {
   type DeepInfraEmbeddingResponse,
   type InferenceStatus,
 } from "./embedding";
-export { DeepInfra } from "./llm";
+export * from "./llm";
diff --git a/packages/providers/deepinfra/src/llm.ts b/packages/providers/deepinfra/src/llm.ts
index e56e9469c00ba3b498a54e189b48c4e1a92a414f..158852de9c0571fad163b45510b8d1d781e52d7e 100644
--- a/packages/providers/deepinfra/src/llm.ts
+++ b/packages/providers/deepinfra/src/llm.ts
@@ -31,3 +31,11 @@
     });
   }
 }
+
+/**
+ * Convenience function to create a new DeepInfra instance.
+ * @param init - Optional initialization parameters for the DeepInfra instance.
+ * @returns A new DeepInfra instance.
+ */
+export const deepinfra = (init?: ConstructorParameters<typeof DeepInfra>[0]) =>
+  new DeepInfra(init);
diff --git a/packages/providers/deepseek/src/llm.ts b/packages/providers/deepseek/src/llm.ts
index 1638475fb743e05c1b8501e23a3983019ca63c2a..b9bcfdf5d7e468055d9c036d36e5060b39846e96 100644
--- a/packages/providers/deepseek/src/llm.ts
+++ b/packages/providers/deepseek/src/llm.ts
@@ -35,3 +35,11 @@
     });
   }
 }
+
+/**
+ * Convenience function to create a new DeepSeekLLM instance.
+ * @param init - Optional initialization parameters for the DeepSeekLLM instance.
+ * @returns A new DeepSeekLLM instance.
+ */
+export const deepseek = (init?: ConstructorParameters<typeof DeepSeekLLM>[0]) =>
+  new DeepSeekLLM(init);
diff --git a/packages/providers/fireworks/src/llm.ts b/packages/providers/fireworks/src/llm.ts
index 4aa9cd8dca362e3ef11f2136268a14cc085117f1..60c20a7c89a914cfd91902c6f84d0b15b6b592a7 100644
--- a/packages/providers/fireworks/src/llm.ts
+++ b/packages/providers/fireworks/src/llm.ts
@@ -26,3 +26,12 @@
     });
   }
 }
+
+/**
+ * Convenience function to create a new FireworksLLM instance.
+ * @param init - Optional initialization parameters for the FireworksLLM instance.
+ * @returns A new FireworksLLM instance.
+ */
+export const fireworks = (
+  init?: ConstructorParameters<typeof FireworksLLM>[0],
+) => new FireworksLLM(init);
diff --git a/packages/providers/google/src/base.ts b/packages/providers/google/src/base.ts
index b9424ef181e11d85cd6169e3416d64479d5b6c5e..2aef44d7e2e25546e6cb82f25150049490d99dfa 100644
--- a/packages/providers/google/src/base.ts
+++ b/packages/providers/google/src/base.ts
@@ -336,3 +336,11 @@
     };
   }
 }
+
+/**
+ * Convenience function to create a new Gemini instance.
+ * @param init - Optional initialization parameters for the Gemini instance.
+ * @returns A new Gemini instance.
+ */
+export const gemini = (init?: ConstructorParameters<typeof Gemini>[0]) =>
+  new Gemini(init);
diff --git a/packages/providers/groq/src/index.ts b/packages/providers/groq/src/index.ts
index b2ca4889258def4e231037afae94b1ce47ada2d6..e6679f111f51c327bb3c414234979edfcf3491c1 100644
--- a/packages/providers/groq/src/index.ts
+++ b/packages/providers/groq/src/index.ts
@@ -1 +1 @@
-export { Groq } from "./llm";
+export * from "./llm";
diff --git a/packages/providers/groq/src/llm.ts b/packages/providers/groq/src/llm.ts
index bfff280a18db5d8f36c8cca54f8b785ac62a10c6..228147c1f06d7a757b12e6c28dba70f0936cd286 100644
--- a/packages/providers/groq/src/llm.ts
+++ b/packages/providers/groq/src/llm.ts
@@ -29,3 +29,11 @@
     }) as never;
   }
 }
+
+/**
+ * Convenience function to create a new Groq instance.
+ * @param init - Optional initialization parameters for the Groq instance.
+ * @returns A new Groq instance.
+ */
+export const groq = (init?: ConstructorParameters<typeof Groq>[0]) =>
+  new Groq(init);
diff --git a/packages/providers/huggingface/src/index.ts b/packages/providers/huggingface/src/index.ts
index 06b31c9a93d499eb3b3c72e4d30f8fcf84952ec6..22d04a567f5018bbd35870183cff92d576266b1b 100644
--- a/packages/providers/huggingface/src/index.ts
+++ b/packages/providers/huggingface/src/index.ts
@@ -2,7 +2,7 @@ export {
   HuggingFaceEmbedding,
   type HuggingFaceEmbeddingParams,
 } from "./embedding";
-export { HuggingFaceLLM, type HFLLMConfig } from "./llm";
+export * from "./llm";
 export {
   HuggingFaceEmbeddingModelType,
   HuggingFaceInferenceAPI,
diff --git a/packages/providers/huggingface/src/llm.ts b/packages/providers/huggingface/src/llm.ts
index 4af3fd49b4f4a4fb0407eb90e5ac208ca1595074..83befb8e485555ca3a5e6da6ec46f344ed0d022a 100644
--- a/packages/providers/huggingface/src/llm.ts
+++ b/packages/providers/huggingface/src/llm.ts
@@ -146,3 +146,12 @@
     throw new Error("Method not implemented.");
   }
 }
+
+/**
+ * Convenience function to create a new HuggingFaceLLM instance.
+ * @param init - Optional initialization parameters for the HuggingFaceLLM instance.
+ * @returns A new HuggingFaceLLM instance.
+ */
+export const huggingface = (
+  init?: ConstructorParameters<typeof HuggingFaceLLM>[0],
+) => new HuggingFaceLLM(init);
diff --git a/packages/providers/mistral/src/llm.ts b/packages/providers/mistral/src/llm.ts
index 17d78eca845ad43b4449f2d92d051bca76a52603..d1a61ad1654585a42206539c0f9b3ec52655f9e2 100644
--- a/packages/providers/mistral/src/llm.ts
+++ b/packages/providers/mistral/src/llm.ts
@@ -136,3 +136,11 @@
     return;
   }
 }
+
+/**
+ * Convenience function to create a new MistralAI instance.
+ * @param init - Optional initialization parameters for the MistralAI instance.
+ * @returns A new MistralAI instance.
+ */
+export const mistral = (init?: ConstructorParameters<typeof MistralAI>[0]) =>
+  new MistralAI(init);
diff --git a/packages/providers/ollama/src/index.ts b/packages/providers/ollama/src/index.ts
index 002d16f41c3ec08a3e7fd3d6dd52ab085d349120..9de93491ddc806c94a963882685066cac2a23022 100644
--- a/packages/providers/ollama/src/index.ts
+++ b/packages/providers/ollama/src/index.ts
@@ -4,4 +4,4 @@
   type OllamaAgentParams,
 } from "./agent";
 export { OllamaEmbedding } from "./embedding";
-export { Ollama, type OllamaParams } from "./llm";
+export * from "./llm";
diff --git a/packages/providers/ollama/src/llm.ts b/packages/providers/ollama/src/llm.ts
index 3878a96783de066fb1f1a7f537a94187e1af4929..15bbf31cf9aaf3135c82e61a7cd4f3ca1b692749 100644
--- a/packages/providers/ollama/src/llm.ts
+++ b/packages/providers/ollama/src/llm.ts
@@ -222,3 +222,11 @@
     };
   }
 }
+
+/**
+ * Convenience function to create a new Ollama instance.
+ * @param init - Initialization parameters for the Ollama instance.
+ * @returns A new Ollama instance.
+ */
+export const ollama = (init: ConstructorParameters<typeof Ollama>[0]) =>
+  new Ollama(init);
diff --git a/packages/providers/openai/src/index.ts b/packages/providers/openai/src/index.ts
index 068984a8ebdc093cb1436d9614ce5663787323be..0c62119eedc4689fc3a91d68ae633fba3905096c 100644
--- a/packages/providers/openai/src/index.ts
+++ b/packages/providers/openai/src/index.ts
@@ -1,15 +1,3 @@
-export {
-  OpenAIAgent,
-  OpenAIAgentWorker,
-  type OpenAIAgentParams,
-} from "./agent";
-export { ALL_OPENAI_EMBEDDING_MODELS, OpenAIEmbedding } from "./embedding";
-export {
-  ALL_AVAILABLE_OPENAI_MODELS,
-  GPT35_MODELS,
-  GPT4_MODELS,
-  O1_MODELS,
-  OpenAI,
-  type OpenAIAdditionalChatOptions,
-  type OpenAIAdditionalMetadata,
-} from "./llm";
+export * from "./agent";
+export * from "./embedding";
+export * from "./llm";
diff --git a/packages/providers/openai/src/llm.ts b/packages/providers/openai/src/llm.ts
index 3383d75518d6487919226f727ac8e0e8729a8568..5d4e7478e3371aa4dd9585003abfbcea91cd9369 100644
--- a/packages/providers/openai/src/llm.ts
+++ b/packages/providers/openai/src/llm.ts
@@ -142,7 +142,7 @@ export const ALL_AVAILABLE_OPENAI_MODELS = {
   ...O3_MODELS,
 } satisfies Record<ChatModel, { contextWindow: number }>;
 
-export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
+function isFunctionCallingModel(llm: LLM): llm is OpenAI {
   let model: string;
   if (llm instanceof OpenAI) {
     model = llm.model;
@@ -157,13 +157,13 @@ export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
   return isChatModel && !isOld && !isO1;
 }
 
-export function isReasoningModel(model: ChatModel | string): boolean {
+function isReasoningModel(model: ChatModel | string): boolean {
   const isO1 = model.startsWith("o1");
   const isO3 = model.startsWith("o3");
   return isO1 || isO3;
 }
 
-export function isTemperatureSupported(model: ChatModel | string): boolean {
+function isTemperatureSupported(model: ChatModel | string): boolean {
   return !model.startsWith("o3");
 }
 
@@ -540,3 +540,11 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
     };
   }
 }
+
+/**
+ * Convenience function to create a new OpenAI instance.
+ * @param init - Optional initialization parameters for the OpenAI instance.
+ * @returns A new OpenAI instance.
+ */
+export const openai = (init?: ConstructorParameters<typeof OpenAI>[0]) =>
+  new OpenAI(init);
diff --git a/packages/providers/portkey-ai/src/index.ts b/packages/providers/portkey-ai/src/index.ts
index 3a248442ea65d7e4ff43a77e8042fd387bc5ef9a..6fbee5a2293a870c42634066f3bc72366962cd0f 100644
--- a/packages/providers/portkey-ai/src/index.ts
+++ b/packages/providers/portkey-ai/src/index.ts
@@ -1 +1 @@
-export { Portkey, PortkeySession, getPortkeySession } from "./portkey";
+export * from "./portkey";
diff --git a/packages/providers/portkey-ai/src/portkey.ts b/packages/providers/portkey-ai/src/portkey.ts
index 334e39099dcb1f1422b492d4f73e9c3bcfa90135..694c3f6c358a0198c3ca97d033671ddf1dd2e501 100644
--- a/packages/providers/portkey-ai/src/portkey.ts
+++ b/packages/providers/portkey-ai/src/portkey.ts
@@ -133,3 +133,11 @@
     return;
   }
 }
+
+/**
+ * Convenience function to create a new Portkey instance.
+ * @param init - Optional initialization parameters for the Portkey instance.
+ * @returns A new Portkey instance.
+ */
+export const portkey = (init?: ConstructorParameters<typeof Portkey>[0]) =>
+  new Portkey(init);
diff --git a/packages/providers/replicate/src/llm.ts b/packages/providers/replicate/src/llm.ts
index 7012af8c02fd14918826871143c93b363770cfcc..f757411799df129bbb8110ae63acff6a45bb93b6 100644
--- a/packages/providers/replicate/src/llm.ts
+++ b/packages/providers/replicate/src/llm.ts
@@ -380,3 +380,12 @@ If a question does not make any sense, or is not factually coherent, explain why
 }
 
 export const LlamaDeuce = ReplicateLLM;
+
+/**
+ * Convenience function to create a new ReplicateLLM instance.
+ * @param init - Optional initialization parameters for the ReplicateLLM instance.
+ * @returns A new ReplicateLLM instance.
+ */
+export const replicate = (
+  init?: ConstructorParameters<typeof ReplicateLLM>[0],
+) => new ReplicateLLM(init);
diff --git a/packages/providers/together/src/llm.ts b/packages/providers/together/src/llm.ts
index fffaea08dfb8b9ab6db1901934495060a41d1267..5afd9646851a7daa5da2d8fdd05ede99c0f8ee71 100644
--- a/packages/providers/together/src/llm.ts
+++ b/packages/providers/together/src/llm.ts
@@ -25,3 +25,11 @@
     });
   }
 }
+
+/**
+ * Convenience function to create a new TogetherLLM instance.
+ * @param init - Optional initialization parameters for the TogetherLLM instance.
+ * @returns A new TogetherLLM instance.
+ */
+export const together = (init?: ConstructorParameters<typeof TogetherLLM>[0]) =>
+  new TogetherLLM(init);
diff --git a/packages/providers/vercel/src/index.ts b/packages/providers/vercel/src/index.ts
index c0347b5c21b89abfcfeee06e9c9a504dd0f03474..e94a5dbb4dc24c6a1e6bec513cdc23dc80984d16 100644
--- a/packages/providers/vercel/src/index.ts
+++ b/packages/providers/vercel/src/index.ts
@@ -1,2 +1,2 @@
-export { VercelLLM } from "./llm";
+export * from "./llm";
 export { llamaindex } from "./tool";
diff --git a/packages/providers/vercel/src/llm.ts b/packages/providers/vercel/src/llm.ts
index fbf899a37c82520276366978913c50eb252ef765..453b2fc1691c70de2f9a3f7b064af205029670fb 100644
--- a/packages/providers/vercel/src/llm.ts
+++ b/packages/providers/vercel/src/llm.ts
@@ -181,3 +181,11 @@
     };
   }
 }
+
+/**
+ * Convenience function to create a new VercelLLM instance.
+ * @param init - initialization parameters for the VercelLLM instance.
+ * @returns A new VercelLLM instance.
+ */
+export const vercel = (init: ConstructorParameters<typeof VercelLLM>[0]) =>
+  new VercelLLM(init);
diff --git a/packages/providers/vllm/src/index.ts b/packages/providers/vllm/src/index.ts
index 2c5bc5792a867319085a279f3db206a7650926d7..e6679f111f51c327bb3c414234979edfcf3491c1 100644
--- a/packages/providers/vllm/src/index.ts
+++ b/packages/providers/vllm/src/index.ts
@@ -1 +1 @@
-export { VLLM, type VLLMParams } from "./llm";
+export * from "./llm";
diff --git a/packages/providers/vllm/src/llm.ts b/packages/providers/vllm/src/llm.ts
index 28b56afc04fdb412d5bdb5b76248406d3f6e25dd..28ef5cf3f89b6ba5e0a143471a5332b8b9d0883f 100644
--- a/packages/providers/vllm/src/llm.ts
+++ b/packages/providers/vllm/src/llm.ts
@@ -23,3 +23,11 @@
     });
   }
 }
+
+/**
+ * Convenience function to create a new VLLM instance.
+ * @param init - initialization parameters for the VLLM instance.
+ * @returns A new VLLM instance.
+ */
+export const vllm = (init: ConstructorParameters<typeof VLLM>[0]) =>
+  new VLLM(init);
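Usage note (not part of the patch): a minimal sketch of the factory style this diff introduces, mirroring the updated examples above; the model names are illustrative.

```ts
import { anthropic } from "@llamaindex/anthropic";
import { openai } from "@llamaindex/openai";

// Each provider package now exports a lowercase factory that forwards
// its init object to the corresponding class constructor, so these are
// equivalent to `new OpenAI(...)` and `new Anthropic(...)`.
const gpt = openai({ model: "gpt-4o-mini" });
const claude = anthropic({ model: "claude-3-5-sonnet" });
```

Typing the parameter as `ConstructorParameters<typeof OpenAI>[0]` keeps each factory's accepted options in lockstep with its class constructor, so no option types are duplicated; note that `ollama`, `vercel`, and `vllm` take a required init object because their constructors do.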