Unverified commit aea550af authored by Marcus Schiesser, committed by GitHub

feat: Add a convenience factory function for each LLM provider, e.g. you… (#1731)

parent e66c6e25
Showing changed files with 113 additions and 31 deletions
---
"@llamaindex/huggingface": patch
"@llamaindex/portkey-ai": patch
"@llamaindex/anthropic": patch
"@llamaindex/deepinfra": patch
"@llamaindex/fireworks": patch
"@llamaindex/replicate": patch
"@llamaindex/deepseek": patch
"@llamaindex/together": patch
"@llamaindex/mistral": patch
"@llamaindex/google": patch
"@llamaindex/ollama": patch
"@llamaindex/openai": patch
"@llamaindex/vercel": patch
"@llamaindex/groq": patch
"@llamaindex/vllm": patch
"@llamaindex/examples": patch
---
Add a convenience factory function for each LLM provider, e.g. you can use openai() instead of new OpenAI()
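For illustration, a minimal sketch of the difference in the OpenAI package (the model name is just an example; both forms take the same options object, given the ConstructorParameters pattern used by the factories below):

```ts
import { OpenAI, openai } from "@llamaindex/openai";

// Before this change: construct the provider class directly
const classStyle = new OpenAI({ model: "gpt-4o-mini" });

// With this change: call the lowercase convenience factory instead
const factoryStyle = openai({ model: "gpt-4o-mini" });
```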
@@ -4,4 +4,4 @@
"@llamaindex/core": patch
---
-Add factory methods agent and multiAgent to simplify agent usage
+Add factory methods tool, agent and multiAgent to simplify agent usage
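A rough sketch of how these factories compose. The tool()/agent() calls and the name/description fields mirror the examples in this diff; the remaining field names and the execute callback shape are assumptions:

```ts
import { agent, tool } from "llamaindex";
import { openai } from "@llamaindex/openai";
import { z } from "zod";

// Hypothetical weather tool built with the `tool` factory
const weatherTool = tool({
  name: "weather",
  description: "Returns the current weather for a city",
  parameters: z.object({ city: z.string() }),
  execute: async ({ city }) => `It is sunny in ${city}`,
});

// A single agent built with the `agent` factory
const weatherAgent = agent({
  name: "WeatherAgent",
  description: "Answers questions about the weather",
  tools: [weatherTool],
  llm: openai({ model: "gpt-4o-mini" }),
});

// `multiAgent` (also exported from "llamaindex") can then coordinate
// several such agents; its exact options are not shown in this diff.
```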
-import { OpenAI } from "@llamaindex/openai";
+import { openai } from "@llamaindex/openai";
import fs from "fs";
import {
agent,
@@ -11,7 +11,7 @@ import os from "os";
import { z } from "zod";
import { WikipediaTool } from "../wiki";
-const llm = new OpenAI({
+const llm = openai({
model: "gpt-4o-mini",
});
......
@@ -3,7 +3,7 @@
* 1. FetchWeatherAgent - Fetches the weather in a city
* 2. TemperatureConverterAgent - Converts the temperature from Fahrenheit to Celsius
*/
-import { OpenAI } from "@llamaindex/openai";
+import { openai } from "@llamaindex/openai";
import {
agent,
AgentInput,
@@ -17,7 +17,7 @@ import {
} from "llamaindex";
import { z } from "zod";
-const llm = new OpenAI({
+const llm = openai({
model: "gpt-4o-mini",
});
......
/**
* This example shows how to use AgentWorkflow as a single agent with tools
*/
-import { OpenAI } from "@llamaindex/openai";
+import { openai } from "@llamaindex/openai";
import { Settings, agent } from "llamaindex";
import { getWeatherTool } from "../agent/utils/tools";
-Settings.llm = new OpenAI({
+Settings.llm = openai({
model: "gpt-4o",
});
......
@@ -8,11 +8,7 @@ import {
} from "llamaindex";
import { z } from "zod";
-import { Anthropic } from "@llamaindex/anthropic";
-const llm = new Anthropic({
-  model: "claude-3-5-sonnet",
-});
+import { anthropic } from "@llamaindex/anthropic";
const weatherTool = tool({
name: "weather",
@@ -57,6 +53,10 @@ const saveFileTool = tool({
});
async function main() {
+  const llm = anthropic({
+    model: "claude-3-5-sonnet",
+  });
const reportAgent = agent({
name: "ReportAgent",
description:
......
-export {
-  AnthropicAgent,
-  AnthropicAgentWorker,
-  type AnthropicAgentParams,
-} from "./agent";
-export {
-  ALL_AVAILABLE_ANTHROPIC_LEGACY_MODELS,
-  ALL_AVAILABLE_ANTHROPIC_MODELS,
-  ALL_AVAILABLE_V3_5_MODELS,
-  ALL_AVAILABLE_V3_MODELS,
-  Anthropic,
-  AnthropicSession,
-  type AnthropicAdditionalChatOptions,
-} from "./llm";
+export * from "./agent";
+export * from "./llm";
@@ -60,7 +60,7 @@ const defaultAnthropicSession: {
* @param options
* @returns
*/
-export function getAnthropicSession(options: ClientOptions = {}) {
+function getAnthropicSession(options: ClientOptions = {}) {
let session = defaultAnthropicSession.find((session) => {
return isDeepEqual(session.options, options);
})?.session;
@@ -586,3 +586,11 @@ export class Anthropic extends ToolCallLLM<
};
}
}
+/**
+ * Convenience function to create a new Anthropic instance.
+ * @param init - Optional initialization parameters for the Anthropic instance.
+ * @returns A new Anthropic instance.
+ */
+export const anthropic = (init?: ConstructorParameters<typeof Anthropic>[0]) =>
+  new Anthropic(init);
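All of the provider factories added in this commit follow this shape: the parameter type is derived with ConstructorParameters<typeof Class>[0], so each factory accepts exactly the options object its class constructor takes and simply forwards it. A minimal usage sketch, mirroring the example earlier in this diff:

```ts
import { anthropic } from "@llamaindex/anthropic";

// Equivalent to `new Anthropic({ model: "claude-3-5-sonnet" })`;
// the factory just forwards its argument to the constructor.
const llm = anthropic({ model: "claude-3-5-sonnet" });
```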
@@ -3,4 +3,4 @@ export {
type DeepInfraEmbeddingResponse,
type InferenceStatus,
} from "./embedding";
-export { DeepInfra } from "./llm";
+export * from "./llm";
@@ -31,3 +31,11 @@ export class DeepInfra extends OpenAI {
});
}
}
+/**
+ * Convenience function to create a new DeepInfra instance.
+ * @param init - Optional initialization parameters for the DeepInfra instance.
+ * @returns A new DeepInfra instance.
+ */
+export const deepinfra = (init?: ConstructorParameters<typeof DeepInfra>[0]) =>
+  new DeepInfra(init);
@@ -35,3 +35,11 @@ export class DeepSeekLLM extends OpenAI {
});
}
}
+/**
+ * Convenience function to create a new DeepSeekLLM instance.
+ * @param init - Optional initialization parameters for the DeepSeekLLM instance.
+ * @returns A new DeepSeekLLM instance.
+ */
+export const deepseek = (init?: ConstructorParameters<typeof DeepSeekLLM>[0]) =>
+  new DeepSeekLLM(init);
@@ -26,3 +26,12 @@ export class FireworksLLM extends OpenAI {
});
}
}
+/**
+ * Convenience function to create a new FireworksLLM instance.
+ * @param init - Optional initialization parameters for the FireworksLLM instance.
+ * @returns A new FireworksLLM instance.
+ */
+export const fireworks = (
+  init?: ConstructorParameters<typeof FireworksLLM>[0],
+) => new FireworksLLM(init);
@@ -336,3 +336,11 @@ export class Gemini extends ToolCallLLM<GeminiAdditionalChatOptions> {
};
}
}
+/**
+ * Convenience function to create a new Gemini instance.
+ * @param init - Optional initialization parameters for the Gemini instance.
+ * @returns A new Gemini instance.
+ */
+export const gemini = (init?: ConstructorParameters<typeof Gemini>[0]) =>
+  new Gemini(init);
-export { Groq } from "./llm";
+export * from "./llm";
@@ -29,3 +29,11 @@ export class Groq extends OpenAI {
}) as never;
}
}
+/**
+ * Convenience function to create a new Groq instance.
+ * @param init - Optional initialization parameters for the Groq instance.
+ * @returns A new Groq instance.
+ */
+export const groq = (init?: ConstructorParameters<typeof Groq>[0]) =>
+  new Groq(init);
@@ -2,7 +2,7 @@ export {
HuggingFaceEmbedding,
type HuggingFaceEmbeddingParams,
} from "./embedding";
-export { HuggingFaceLLM, type HFLLMConfig } from "./llm";
+export * from "./llm";
export {
HuggingFaceEmbeddingModelType,
HuggingFaceInferenceAPI,
......
@@ -146,3 +146,12 @@ export class HuggingFaceLLM extends BaseLLM {
throw new Error("Method not implemented.");
}
}
+/**
+ * Convenience function to create a new HuggingFaceLLM instance.
+ * @param init - Optional initialization parameters for the HuggingFaceLLM instance.
+ * @returns A new HuggingFaceLLM instance.
+ */
+export const huggingface = (
+  init?: ConstructorParameters<typeof HuggingFaceLLM>[0],
+) => new HuggingFaceLLM(init);
@@ -136,3 +136,11 @@ export class MistralAI extends BaseLLM {
return;
}
}
+/**
+ * Convenience function to create a new MistralAI instance.
+ * @param init - Optional initialization parameters for the MistralAI instance.
+ * @returns A new MistralAI instance.
+ */
+export const mistral = (init?: ConstructorParameters<typeof MistralAI>[0]) =>
+  new MistralAI(init);
@@ -4,4 +4,4 @@ export {
type OllamaAgentParams,
} from "./agent";
export { OllamaEmbedding } from "./embedding";
-export { Ollama, type OllamaParams } from "./llm";
+export * from "./llm";
@@ -222,3 +222,11 @@ export class Ollama extends ToolCallLLM {
};
}
}
+/**
+ * Convenience function to create a new Ollama instance.
+ * @param init - Optional initialization parameters for the Ollama instance.
+ * @returns A new Ollama instance.
+ */
+export const ollama = (init: ConstructorParameters<typeof Ollama>[0]) =>
+  new Ollama(init);
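Note that, unlike the other factories above, ollama declares its init parameter without a "?", so callers must pass an options object, presumably including at least a model name. A hedged usage sketch (the model tag is only an example, not taken from this diff):

```ts
import { ollama } from "@llamaindex/ollama";

// Any locally available Ollama model tag can be used here.
const llm = ollama({ model: "llama3.2" });
```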