Commit edd0f662 authored by Alex Yang, committed by GitHub

feat: support Together AI (#373)

parent 2da407d6
import { TogetherEmbedding, TogetherLLM } from "llamaindex";

// process.env.TOGETHER_API_KEY is required
const together = new TogetherLLM({
  model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
});

(async () => {
  const generator = await together.chat(
    [
      {
        role: "system",
        content: "You are an AI assistant",
      },
      {
        role: "user",
        content: "Tell me about San Francisco",
      },
    ],
    undefined,
    true,
  );
  console.log("Chatting with Together AI...");
  for await (const message of generator) {
    process.stdout.write(message);
  }

  const embedding = new TogetherEmbedding();
  const vector = await embedding.getTextEmbedding("Hello world!");
  console.log("vector:", vector);
})();
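For reference, omitting the trailing streaming flag of chat() should yield a single response rather than an async generator. A minimal non-streaming sketch against the same pre-0.1 chat(messages) signature used above, assuming the response exposes its text as message.content as in the OpenAI base class:

import { TogetherLLM } from "llamaindex";

// Non-streaming variant (sketch): chat(messages) resolves to one response.
const together = new TogetherLLM({
  model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
});

(async () => {
  const response = await together.chat([
    { role: "user", content: "Tell me about San Francisco" },
  ]);
  console.log(response.message.content);
})();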
@@ -14,7 +14,7 @@ export enum OpenAIEmbeddingModelType {
 }

 export class OpenAIEmbedding extends BaseEmbedding {
-  model: OpenAIEmbeddingModelType;
+  model: OpenAIEmbeddingModelType | string;

   // OpenAI session params
   apiKey?: string = undefined;
@@ -3,5 +3,6 @@ export * from "./HuggingFaceEmbedding";
 export * from "./MistralAIEmbedding";
 export * from "./MultiModalEmbedding";
 export * from "./OpenAIEmbedding";
+export { TogetherEmbedding } from "./together";
 export * from "./types";
 export * from "./utils";
import { OpenAIEmbedding } from "./OpenAIEmbedding";

export class TogetherEmbedding extends OpenAIEmbedding {
  override model: string;

  constructor(init?: Partial<OpenAIEmbedding>) {
    super({
      apiKey: process.env.TOGETHER_API_KEY,
      ...init,
      additionalSessionOptions: {
        ...init?.additionalSessionOptions,
        baseURL: "https://api.together.xyz/v1",
      },
    });
    this.model = init?.model ?? "togethercomputer/m2-bert-80M-32k-retrieval";
  }
}
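A minimal usage sketch for the new embedding class, assuming TOGETHER_API_KEY is set; the explicit model name below is only an illustration of overriding the togethercomputer/m2-bert-80M-32k-retrieval default:

import { TogetherEmbedding } from "llamaindex";

(async () => {
  // Any model string Together serves can be passed now that `model` is
  // typed as `OpenAIEmbeddingModelType | string` (model name illustrative).
  const embedding = new TogetherEmbedding({
    model: "togethercomputer/m2-bert-80M-8k-retrieval",
  });
  const vector = await embedding.getTextEmbedding("Together AI embeddings");
  console.log(vector.length);
})();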
@@ -129,7 +129,7 @@ export class OpenAI implements LLM {
   hasStreaming: boolean = true;

   // Per completion OpenAI params
-  model: keyof typeof ALL_AVAILABLE_OPENAI_MODELS;
+  model: keyof typeof ALL_AVAILABLE_OPENAI_MODELS | string;
   temperature: number;
   topP: number;
   maxTokens?: number;
@@ -205,12 +205,16 @@
   }

   get metadata() {
+    const contextWindow =
+      ALL_AVAILABLE_OPENAI_MODELS[
+        this.model as keyof typeof ALL_AVAILABLE_OPENAI_MODELS
+      ]?.contextWindow ?? 1024;
     return {
       model: this.model,
       temperature: this.temperature,
       topP: this.topP,
       maxTokens: this.maxTokens,
-      contextWindow: ALL_AVAILABLE_OPENAI_MODELS[this.model].contextWindow,
+      contextWindow,
       tokenizer: Tokenizers.CL100K_BASE,
     };
   }
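This metadata change is what lets non-OpenAI model names pass through: when this.model is not a key of ALL_AVAILABLE_OPENAI_MODELS, the optional chain yields undefined and contextWindow falls back to 1024 instead of throwing on the property access. A small sketch of the resulting behavior with the new TogetherLLM class added below, assuming TOGETHER_API_KEY is set (the 1024 default comes from the diff above, not from Together's actual model limits):

import { TogetherLLM } from "llamaindex";

// Mixtral is not in ALL_AVAILABLE_OPENAI_MODELS, so metadata.contextWindow
// resolves to the 1024 fallback rather than throwing.
const llm = new TogetherLLM({ model: "mistralai/Mixtral-8x7B-Instruct-v0.1" });
console.log(llm.metadata.contextWindow); // 1024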
export * from "./LLM";
export * from "./mistral";
export { Ollama } from "./ollama";
export { TogetherLLM } from "./together";
import { OpenAI } from "./LLM";

export class TogetherLLM extends OpenAI {
  constructor(init?: Partial<OpenAI>) {
    super({
      ...init,
      apiKey: process.env.TOGETHER_API_KEY,
      additionalSessionOptions: {
        ...init?.additionalSessionOptions,
        baseURL: "https://api.together.xyz/v1",
      },
    });
  }
}
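Since TogetherLLM only repoints the OpenAI client at Together's OpenAI-compatible endpoint, the per-completion parameters inherited from the OpenAI class still apply. A construction sketch (model name illustrative):

import { TogetherLLM } from "llamaindex";

// Together-hosted model with the usual OpenAI-style sampling params; any model
// string works because `model` is now typed as `... | string`.
const llm = new TogetherLLM({
  model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
  temperature: 0.2,
  maxTokens: 512,
});

Note that this constructor spreads apiKey after init, so TOGETHER_API_KEY from the environment takes precedence over a key passed in init, whereas TogetherEmbedding above spreads it before init and therefore allows an explicit override.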
@@ -10,6 +10,7 @@ module.exports = {
   "REPLICATE_API_TOKEN",
   "ANTHROPIC_API_KEY",
   "ASSEMBLYAI_API_KEY",
+  "TOGETHER_API_KEY",
   "ASTRA_DB_APPLICATION_TOKEN",
   "ASTRA_DB_ENDPOINT",