diff --git a/examples/together-ai.ts b/examples/together-ai.ts
new file mode 100644
index 0000000000000000000000000000000000000000..257ce466fd73c19bbbf4c63367670ac6e85fb4f8
--- /dev/null
+++ b/examples/together-ai.ts
@@ -0,0 +1,38 @@
+import { TogetherEmbedding, TogetherLLM } from "llamaindex";
+
+// Reads the Together AI key from process.env.TOGETHER_API_KEY (required).
+const together = new TogetherLLM({
+  model: "mistralai/Mixtral-8x7B-Instruct-v0.1",
+});
+
+async function main() {
+  // Third positional argument `true` requests a streaming response.
+  const generator = await together.chat(
+    [
+      {
+        role: "system",
+        content: "You are an AI assistant",
+      },
+      {
+        role: "user",
+        content: "Tell me about San Francisco",
+      },
+    ],
+    undefined,
+    true,
+  );
+  console.log("Chatting with Together AI...");
+  for await (const message of generator) {
+    process.stdout.write(message);
+  }
+
+  // The embedding client uses the same TOGETHER_API_KEY.
+  const embedding = new TogetherEmbedding();
+  const vector = await embedding.getTextEmbedding("Hello world!");
+  console.log("vector:", vector);
+}
+
+// Handle rejection explicitly: a bare floating IIFE would turn any error
+// into an unhandled promise rejection.
+main().catch((err) => {
+  console.error(err);
+  process.exitCode = 1;
+});
diff --git a/packages/core/src/embeddings/OpenAIEmbedding.ts b/packages/core/src/embeddings/OpenAIEmbedding.ts
index 106c6cbff294d49bf87d912676e3656339b1b4c8..6bbbfba3ae7ea145e777b5acb2132a94728a1201 100644
--- a/packages/core/src/embeddings/OpenAIEmbedding.ts
+++ b/packages/core/src/embeddings/OpenAIEmbedding.ts
@@ -14,7 +14,7 @@ export enum OpenAIEmbeddingModelType {
 }
 
 export class OpenAIEmbedding extends BaseEmbedding {
-  model: OpenAIEmbeddingModelType;
+  model: OpenAIEmbeddingModelType | string;
 
   // OpenAI session params
   apiKey?: string = undefined;
diff --git a/packages/core/src/embeddings/index.ts b/packages/core/src/embeddings/index.ts
index 32d6535bda4240ae0b77773295d6ab3101821620..80a788f5823f6bab7e4a3d428e304ac52381b7d3 100644
--- a/packages/core/src/embeddings/index.ts
+++ b/packages/core/src/embeddings/index.ts
@@ -3,5 +3,6 @@ export * from "./HuggingFaceEmbedding";
 export * from "./MistralAIEmbedding";
 export * from "./MultiModalEmbedding";
 export * from "./OpenAIEmbedding";
+export { TogetherEmbedding } from "./together";
 export * from "./types";
 export * from "./utils";
diff --git a/packages/core/src/embeddings/together.ts b/packages/core/src/embeddings/together.ts
new file mode 100644
index 0000000000000000000000000000000000000000..dde47c30c3735b025eb1682853750b3c33925239
--- /dev/null
+++ b/packages/core/src/embeddings/together.ts
@@ -0,0 +1,23 @@
+import { OpenAIEmbedding } from "./OpenAIEmbedding";
+
+/**
+ * OpenAI-compatible embedding client pointed at the Together AI API.
+ * The API key defaults to process.env.TOGETHER_API_KEY but a key passed
+ * via `init.apiKey` takes precedence; the baseURL is always forced to
+ * the Together endpoint regardless of `init`.
+ */
+export class TogetherEmbedding extends OpenAIEmbedding {
+  override model: string;
+
+  constructor(init?: Partial<OpenAIEmbedding>) {
+    const sessionOptions = {
+      ...init?.additionalSessionOptions,
+      baseURL: "https://api.together.xyz/v1",
+    };
+    super({
+      apiKey: process.env.TOGETHER_API_KEY,
+      ...init,
+      additionalSessionOptions: sessionOptions,
+    });
+    this.model = init?.model ?? "togethercomputer/m2-bert-80M-32k-retrieval";
+  }
+}
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index 06a0857a6e23d242154d22ff74c6751a72f1e93f..a90e938d7818cdbae173612ea544c82e4424e80c 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -129,7 +129,7 @@ export class OpenAI implements LLM {
   hasStreaming: boolean = true;
 
   // Per completion OpenAI params
-  model: keyof typeof ALL_AVAILABLE_OPENAI_MODELS;
+  model: keyof typeof ALL_AVAILABLE_OPENAI_MODELS | string;
   temperature: number;
   topP: number;
   maxTokens?: number;
@@ -205,12 +205,18 @@
   }
 
   get metadata() {
+    // Non-OpenAI model ids (e.g. Together AI models, now allowed by the
+    // widened `model` type) are absent from the table; fall back to a
+    // conservative 1024-token context window for them.
+    const contextWindow =
+      ALL_AVAILABLE_OPENAI_MODELS[
+        this.model as keyof typeof ALL_AVAILABLE_OPENAI_MODELS
+      ]?.contextWindow ?? 1024;
     return {
       model: this.model,
       temperature: this.temperature,
       topP: this.topP,
       maxTokens: this.maxTokens,
-      contextWindow: ALL_AVAILABLE_OPENAI_MODELS[this.model].contextWindow,
+      contextWindow,
       tokenizer: Tokenizers.CL100K_BASE,
     };
   }
diff --git a/packages/core/src/llm/index.ts b/packages/core/src/llm/index.ts
index 5c1a9f3eddf4ad84541330a25e32ebde00f7c600..74e0b91d939065e4d566517c614271e645f58677 100644
--- a/packages/core/src/llm/index.ts
+++ b/packages/core/src/llm/index.ts
@@ -1,3 +1,4 @@
 export * from "./LLM";
 export * from "./mistral";
 export { Ollama } from "./ollama";
+export { TogetherLLM } from "./together";
diff --git a/packages/core/src/llm/together.ts b/packages/core/src/llm/together.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f972faf7d6f049648bf5c5a09f81d17552fff5f7
--- /dev/null
+++ b/packages/core/src/llm/together.ts
@@ -0,0 +1,21 @@
+import { OpenAI } from "./LLM";
+
+/**
+ * OpenAI-compatible LLM client pointed at the Together AI API.
+ * The API key defaults to process.env.TOGETHER_API_KEY; a key passed via
+ * `init.apiKey` takes precedence. The baseURL is always forced to Together.
+ */
+export class TogetherLLM extends OpenAI {
+  constructor(init?: Partial<OpenAI>) {
+    super({
+      // Default first so a caller-supplied init.apiKey wins — and so an
+      // unset env var cannot clobber it with undefined. Mirrors the
+      // spread order used by TogetherEmbedding.
+      apiKey: process.env.TOGETHER_API_KEY,
+      ...init,
+      additionalSessionOptions: {
+        ...init?.additionalSessionOptions,
+        baseURL: "https://api.together.xyz/v1",
+      },
+    });
+  }
+}
diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js
index 4383def9e00ca5179885ac5038a1549d264eecb5..ff53536b5dc7ef9b60c5f91780a5751b84a82c80 100644
--- a/packages/eslint-config-custom/index.js
+++ b/packages/eslint-config-custom/index.js
@@ -10,6 +10,7 @@ module.exports = {
           "REPLICATE_API_TOKEN",
           "ANTHROPIC_API_KEY",
           "ASSEMBLYAI_API_KEY",
+          "TOGETHER_API_KEY",
 
           "ASTRA_DB_APPLICATION_TOKEN",
           "ASTRA_DB_ENDPOINT",