diff --git a/.changeset/tender-kiwis-unite.md b/.changeset/tender-kiwis-unite.md
new file mode 100644
index 0000000000000000000000000000000000000000..bd31983ebe234a4da8fcb27d5370bc608d647d31
--- /dev/null
+++ b/.changeset/tender-kiwis-unite.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+Add Fireworks to LlamaIndex
diff --git a/README.md b/README.md
index 23284d2f430941ab495fa898708eef3a0ddaceb8..37ed02516d9bfe04f443bca1e279d43890b91b07 100644
--- a/README.md
+++ b/README.md
@@ -127,6 +127,7 @@ module.exports = nextConfig;
 - Anthropic Claude Instant and Claude 2
 - Llama2 Chat LLMs (70B, 13B, and 7B parameters)
 - MistralAI Chat LLMs
+- Fireworks Chat LLMs
 
 ## Contributing:
 
diff --git a/apps/docs/docs/modules/llms/available_llms/fireworks.md b/apps/docs/docs/modules/llms/available_llms/fireworks.md
new file mode 100644
index 0000000000000000000000000000000000000000..de2713b9614fd0e916a2147fce2a8065fcea4fbb
--- /dev/null
+++ b/apps/docs/docs/modules/llms/available_llms/fireworks.md
@@ -0,0 +1,65 @@
+# Fireworks LLM
+
+Fireworks.ai focuses on production use cases for open-source LLMs, offering speed and quality.
+
+## Usage
+
+```ts
+import { FireworksLLM, serviceContextFromDefaults } from "llamaindex";
+
+const fireworksLLM = new FireworksLLM({
+  apiKey: "<YOUR_API_KEY>",
+});
+
+const serviceContext = serviceContextFromDefaults({ llm: fireworksLLM });
+```
+
+## Load and index documents
+
+For this example, we will load the Berkshire Hathaway 2022 annual report PDF.
+
+```ts
+const reader = new PDFReader();
+const documents = await reader.loadData("../data/brk-2022.pdf");
+
+// Split text and create embeddings. Store them in a VectorStoreIndex
+const index = await VectorStoreIndex.fromDocuments(documents, {
+  serviceContext,
+});
+```
+
+## Query
+
+```ts
+const queryEngine = index.asQueryEngine();
+const response = await queryEngine.query({
+  query: "What mistakes did Warren E. Buffett make?",
+});
+```
+
+## Full Example
+
+```ts
+import { FireworksLLM, VectorStoreIndex, serviceContextFromDefaults } from "llamaindex";
+import { PDFReader } from "llamaindex/readers/PDFReader";
+
+const llm = new FireworksLLM({ apiKey: "<YOUR_API_KEY>" });
+const serviceContext = serviceContextFromDefaults({ llm });
+
+async function main() {
+  // Load PDF
+  const reader = new PDFReader();
+  const documents = await reader.loadData("../data/brk-2022.pdf");
+  // Split text and create embeddings. Store them in a VectorStoreIndex
+  const index = await VectorStoreIndex.fromDocuments(documents, { serviceContext });
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+  const response = await queryEngine.query({
+    query: "What mistakes did Warren E. Buffett make?",
+  });
+  // Output response
+  console.log(response.toString());
+}
+
+main().catch(console.error);
+```
diff --git a/examples/readers/src/pdf_fw.ts b/examples/readers/src/pdf_fw.ts
new file mode 100644
index 0000000000000000000000000000000000000000..d4a47e72422ccab79b4719c6e66b7edb4a07baf6
--- /dev/null
+++ b/examples/readers/src/pdf_fw.ts
@@ -0,0 +1,36 @@
+import { FireworksEmbedding, FireworksLLM, VectorStoreIndex } from "llamaindex";
+import { PDFReader } from "llamaindex/readers/PDFReader";
+
+import { serviceContextFromDefaults } from "llamaindex";
+
+const embedModel = new FireworksEmbedding({
+  model: "nomic-ai/nomic-embed-text-v1.5",
+});
+
+const llm = new FireworksLLM({
+  model: "accounts/fireworks/models/mixtral-8x7b-instruct",
+});
+
+const serviceContext = serviceContextFromDefaults({ llm, embedModel });
+
+async function main() {
+  // Load PDF
+  const reader = new PDFReader();
+  const documents = await reader.loadData("../data/brk-2022.pdf");
+
+  // Split text and create embeddings. Store them in a VectorStoreIndex
+  const index = await VectorStoreIndex.fromDocuments(documents, {
+    serviceContext,
+  });
+
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+  const response = await queryEngine.query({
+    query: "What mistakes did Warren E. Buffett make?",
+  });
+
+  // Output response
+  console.log(response.toString());
+}
+
+main().catch(console.error);
diff --git a/examples/readers/src/pdf_fw_openai.ts b/examples/readers/src/pdf_fw_openai.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2910deb251ce8ce07ae631ce81afe86f849eb06f
--- /dev/null
+++ b/examples/readers/src/pdf_fw_openai.ts
@@ -0,0 +1,36 @@
+import { OpenAI, OpenAIEmbedding, VectorStoreIndex } from "llamaindex";
+import { PDFReader } from "llamaindex/readers/PDFReader";
+
+import { serviceContextFromDefaults } from "llamaindex";
+
+// Point the OpenAI-compatible clients at Fireworks' inference endpoint
+const apiKey = process.env.FIREWORKS_API_KEY;
+const additionalSessionOptions = { baseURL: "https://api.fireworks.ai/inference/v1" };
+
+const embedModel = new OpenAIEmbedding({ model: "nomic-ai/nomic-embed-text-v1.5", apiKey, additionalSessionOptions });
+
+const llm = new OpenAI({ model: "accounts/fireworks/models/mixtral-8x7b-instruct", apiKey, additionalSessionOptions });
+
+const serviceContext = serviceContextFromDefaults({ llm, embedModel });
+
+async function main() {
+  // Load PDF
+  const reader = new PDFReader();
+  const documents = await reader.loadData("../data/brk-2022.pdf");
+
+  // Split text and create embeddings. Store them in a VectorStoreIndex
+  const index = await VectorStoreIndex.fromDocuments(documents, {
+    serviceContext,
+  });
+
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+  const response = await queryEngine.query({
+    query: "What mistakes did Warren E. Buffett make?",
+  });
+
+  // Output response
+  console.log(response.toString());
+}
+
+main().catch(console.error);
diff --git a/examples/readers/test.py b/examples/readers/test.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/packages/core/src/embeddings/fireworks.ts b/packages/core/src/embeddings/fireworks.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a2abfa1313b5ecf29abc8cc2268b669827f2f3de
--- /dev/null
+++ b/packages/core/src/embeddings/fireworks.ts
@@ -0,0 +1,27 @@
+import { OpenAIEmbedding } from "./OpenAIEmbedding";
+
+export class FireworksEmbedding extends OpenAIEmbedding {
+  constructor(init?: Partial<OpenAIEmbedding>) {
+    const {
+      apiKey = process.env.FIREWORKS_API_KEY,
+      additionalSessionOptions = {},
+      model = "nomic-ai/nomic-embed-text-v1.5",
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set Fireworks Key in FIREWORKS_API_KEY env variable");
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ??
+      "https://api.fireworks.ai/inference/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/core/src/embeddings/index.ts b/packages/core/src/embeddings/index.ts
index d106de31b67d698118a51908a485b818dd8130a9..4660233c0897c78bb8676c167cc761a1237127f6 100644
--- a/packages/core/src/embeddings/index.ts
+++ b/packages/core/src/embeddings/index.ts
@@ -4,6 +4,7 @@ export * from "./MistralAIEmbedding";
 export * from "./MultiModalEmbedding";
 export { OllamaEmbedding } from "./OllamaEmbedding";
 export * from "./OpenAIEmbedding";
+export { FireworksEmbedding } from "./fireworks";
 export { TogetherEmbedding } from "./together";
 export * from "./types";
 export * from "./utils";
diff --git a/packages/core/src/llm/fireworks.ts b/packages/core/src/llm/fireworks.ts
new file mode 100644
index 0000000000000000000000000000000000000000..89bd0796fb499165b6053ed10719b773633e4052
--- /dev/null
+++ b/packages/core/src/llm/fireworks.ts
@@ -0,0 +1,27 @@
+import { OpenAI } from "./LLM";
+
+export class FireworksLLM extends OpenAI {
+  constructor(init?: Partial<OpenAI>) {
+    const {
+      apiKey = process.env.FIREWORKS_API_KEY,
+      additionalSessionOptions = {},
+      model = "accounts/fireworks/models/mixtral-8x7b-instruct",
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set Fireworks API Key in FIREWORKS_API_KEY env variable");
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ??
+      "https://api.fireworks.ai/inference/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/core/src/llm/index.ts b/packages/core/src/llm/index.ts
index 16dcd5353920905604864b2aae2b27ac1df88f06..6b62b740c11fa569ab10bb3c807ec434b6b77dbd 100644
--- a/packages/core/src/llm/index.ts
+++ b/packages/core/src/llm/index.ts
@@ -1,4 +1,5 @@
 export * from "./LLM";
+export { FireworksLLM } from "./fireworks";
 export {
   ALL_AVAILABLE_MISTRAL_MODELS,
   MistralAI,
diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js
index 6920e9db112e299527574c2b8e3261c7b34fd092..8232a9ae82275266660b9b269267646fdfbb86be 100644
--- a/packages/eslint-config-custom/index.js
+++ b/packages/eslint-config-custom/index.js
@@ -12,6 +12,7 @@ module.exports = {
           "ANTHROPIC_API_KEY",
           "ASSEMBLYAI_API_KEY",
           "TOGETHER_API_KEY",
+          "FIREWORKS_API_KEY",
 
           "ASTRA_DB_APPLICATION_TOKEN",
           "ASTRA_DB_ENDPOINT",