Skip to content
Snippets Groups Projects
Unverified Commit aea550af authored by Marcus Schiesser's avatar Marcus Schiesser Committed by GitHub
Browse files

feat: Add a convenience factory for each LLM provider (#1731)

parent e66c6e25
No related branches found
No related tags found
No related merge requests found
export {
OpenAIAgent,
OpenAIAgentWorker,
type OpenAIAgentParams,
} from "./agent";
export { ALL_OPENAI_EMBEDDING_MODELS, OpenAIEmbedding } from "./embedding";
export {
ALL_AVAILABLE_OPENAI_MODELS,
GPT35_MODELS,
GPT4_MODELS,
O1_MODELS,
OpenAI,
type OpenAIAdditionalChatOptions,
type OpenAIAdditionalMetadata,
} from "./llm";
export * from "./agent";
export * from "./embedding";
export * from "./llm";
......@@ -142,7 +142,7 @@ export const ALL_AVAILABLE_OPENAI_MODELS = {
...O3_MODELS,
} satisfies Record<ChatModel, { contextWindow: number }>;
export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
function isFunctionCallingModel(llm: LLM): llm is OpenAI {
let model: string;
if (llm instanceof OpenAI) {
model = llm.model;
......@@ -157,13 +157,13 @@ export function isFunctionCallingModel(llm: LLM): llm is OpenAI {
return isChatModel && !isOld && !isO1;
}
export function isReasoningModel(model: ChatModel | string): boolean {
function isReasoningModel(model: ChatModel | string): boolean {
const isO1 = model.startsWith("o1");
const isO3 = model.startsWith("o3");
return isO1 || isO3;
}
export function isTemperatureSupported(model: ChatModel | string): boolean {
function isTemperatureSupported(model: ChatModel | string): boolean {
return !model.startsWith("o3");
}
......@@ -540,3 +540,11 @@ export class OpenAI extends ToolCallLLM<OpenAIAdditionalChatOptions> {
};
}
}
/**
 * Shorthand factory that builds an {@link OpenAI} LLM instance.
 *
 * @param init - Optional constructor options, forwarded verbatim to `new OpenAI()`.
 * @returns A newly constructed OpenAI instance.
 */
export const openai = (init?: ConstructorParameters<typeof OpenAI>[0]) => {
  return new OpenAI(init);
};
export { Portkey, PortkeySession, getPortkeySession } from "./portkey";
export * from "./portkey";
......@@ -133,3 +133,11 @@ export class Portkey extends BaseLLM {
return;
}
}
/**
 * Shorthand factory that builds a {@link Portkey} LLM instance.
 *
 * @param init - Optional constructor options, forwarded verbatim to `new Portkey()`.
 * @returns A newly constructed Portkey instance.
 */
export const portkey = (init?: ConstructorParameters<typeof Portkey>[0]) => {
  return new Portkey(init);
};
......@@ -380,3 +380,12 @@ If a question does not make any sense, or is not factually coherent, explain why
}
// Backwards-compatibility alias: re-exports ReplicateLLM under its older name.
// NOTE(review): presumably kept so existing imports keep working — verify no
// callers rely on it before removing.
export const LlamaDeuce = ReplicateLLM;
/**
 * Shorthand factory that builds a {@link ReplicateLLM} instance.
 *
 * @param init - Optional constructor options, forwarded verbatim to `new ReplicateLLM()`.
 * @returns A newly constructed ReplicateLLM instance.
 */
export const replicate = (
  init?: ConstructorParameters<typeof ReplicateLLM>[0],
) => {
  return new ReplicateLLM(init);
};
......@@ -25,3 +25,11 @@ export class TogetherLLM extends OpenAI {
});
}
}
/**
 * Shorthand factory that builds a {@link TogetherLLM} instance.
 *
 * @param init - Optional constructor options, forwarded verbatim to `new TogetherLLM()`.
 * @returns A newly constructed TogetherLLM instance.
 */
export const together = (init?: ConstructorParameters<typeof TogetherLLM>[0]) => {
  return new TogetherLLM(init);
};
export { VercelLLM } from "./llm";
export * from "./llm";
export { llamaindex } from "./tool";
......@@ -181,3 +181,11 @@ export class VercelLLM extends ToolCallLLM<VercelAdditionalChatOptions> {
};
}
}
/**
 * Shorthand factory that builds a {@link VercelLLM} instance.
 *
 * @param init - Constructor options (required), forwarded verbatim to `new VercelLLM()`.
 * @returns A newly constructed VercelLLM instance.
 */
export const vercel = (init: ConstructorParameters<typeof VercelLLM>[0]) => {
  return new VercelLLM(init);
};
export { VLLM, type VLLMParams } from "./llm";
export * from "./llm";
......@@ -23,3 +23,11 @@ export class VLLM extends OpenAI {
});
}
}
/**
 * Shorthand factory that builds a {@link VLLM} instance.
 *
 * @param init - Constructor options (required), forwarded verbatim to `new VLLM()`.
 * @returns A newly constructed VLLM instance.
 */
export const vllm = (init: ConstructorParameters<typeof VLLM>[0]) => {
  return new VLLM(init);
};
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment