From 59f9fb6c3fb5d4a5c050bc5f6f7cc466a4cd77cc Mon Sep 17 00:00:00 2001
From: "Yufei (Benny) Chen" <1585539+benjibc@users.noreply.github.com>
Date: Tue, 20 Feb 2024 05:53:09 -0800
Subject: [PATCH] Add Fireworks to LlamaIndex (#539)

Co-authored-by: Emanuel Ferreira <contatoferreirads@gmail.com>
---
 .changeset/tender-kiwis-unite.md              |  5 ++
 README.md                                     |  1 +
 .../modules/llms/available_llms/fireworks.md  | 65 +++++++++++++++++++
 examples/readers/src/pdf_fw.ts                | 36 ++++++++++
 examples/readers/src/pdf_fw_openai.ts         | 36 ++++++++++
 packages/core/src/embeddings/fireworks.ts     | 27 ++++++++
 packages/core/src/embeddings/index.ts         |  1 +
 packages/core/src/llm/fireworks.ts            | 27 ++++++++
 packages/core/src/llm/index.ts                |  1 +
 packages/eslint-config-custom/index.js        |  1 +
 10 files changed, 200 insertions(+)
 create mode 100644 .changeset/tender-kiwis-unite.md
 create mode 100644 apps/docs/docs/modules/llms/available_llms/fireworks.md
 create mode 100644 examples/readers/src/pdf_fw.ts
 create mode 100644 examples/readers/src/pdf_fw_openai.ts
 create mode 100644 packages/core/src/embeddings/fireworks.ts
 create mode 100644 packages/core/src/llm/fireworks.ts

diff --git a/.changeset/tender-kiwis-unite.md b/.changeset/tender-kiwis-unite.md
new file mode 100644
index 000000000..bd31983eb
--- /dev/null
+++ b/.changeset/tender-kiwis-unite.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+Add Fireworks to LlamaIndex
diff --git a/README.md b/README.md
index 23284d2f4..37ed02516 100644
--- a/README.md
+++ b/README.md
@@ -127,6 +127,7 @@ module.exports = nextConfig;
 - Anthropic Claude Instant and Claude 2
 - Llama2 Chat LLMs (70B, 13B, and 7B parameters)
 - MistralAI Chat LLMs
+- Fireworks Chat LLMs
 
 ## Contributing:
 
diff --git a/apps/docs/docs/modules/llms/available_llms/fireworks.md b/apps/docs/docs/modules/llms/available_llms/fireworks.md
new file mode 100644
index 000000000..de2713b96
--- /dev/null
+++ b/apps/docs/docs/modules/llms/available_llms/fireworks.md
@@ -0,0 +1,65 @@
+# Fireworks LLM
+
+Fireworks.ai focuses on production use cases for open-source LLMs, offering both speed and quality.
+
+## Usage
+
+```ts
+import { FireworksLLM, serviceContextFromDefaults } from "llamaindex";
+
+const fireworksLLM = new FireworksLLM({
+  apiKey: "<YOUR_API_KEY>",
+});
+
+const serviceContext = serviceContextFromDefaults({ llm: fireworksLLM });
+```
+
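+The service context above only overrides the LLM; embeddings still use the
+default embedding model. If you also want Fireworks to generate your embeddings,
+you can pass a `FireworksEmbedding` instance (which defaults to the
+`nomic-ai/nomic-embed-text-v1.5` model) to the same service context:
+
+```ts
+import { FireworksEmbedding } from "llamaindex";
+
+const serviceContext = serviceContextFromDefaults({
+  llm: fireworksLLM,
+  embedModel: new FireworksEmbedding({ apiKey: "<YOUR_API_KEY>" }),
+});
+```
+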
+## Load and index documents
+
+For this example, we will load the Berkshire Hathaway 2022 annual report PDF.
+
+```ts
+const reader = new PDFReader();
+const documents = await reader.loadData("../data/brk-2022.pdf");
+
+// Split text and create embeddings. Store them in a VectorStoreIndex
+const index = await VectorStoreIndex.fromDocuments(documents, {
+  serviceContext,
+});
+```
+
+## Query
+
+```ts
+const queryEngine = index.asQueryEngine();
+const response = await queryEngine.query({
+  query: "What mistakes did Warren E. Buffett make?",
+});
+```
+
+## Full Example
+
+```ts
+import {
+  FireworksLLM,
+  VectorStoreIndex,
+  serviceContextFromDefaults,
+} from "llamaindex";
+import { PDFReader } from "llamaindex/readers/PDFReader";
+
+async function main() {
+  // Load PDF
+  const reader = new PDFReader();
+  const documents = await reader.loadData("../data/brk-2022.pdf");
+
+  // Use Fireworks as the LLM (the API key is read from FIREWORKS_API_KEY)
+  const serviceContext = serviceContextFromDefaults({
+    llm: new FireworksLLM(),
+  });
+
+  // Split text and create embeddings. Store them in a VectorStoreIndex
+  const index = await VectorStoreIndex.fromDocuments(documents, {
+    serviceContext,
+  });
+
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+  const response = await queryEngine.query({
+    query: "What mistakes did Warren E. Buffett make?",
+  });
+
+  // Output response
+  console.log(response.toString());
+}
+
+main().catch(console.error);
+```
diff --git a/examples/readers/src/pdf_fw.ts b/examples/readers/src/pdf_fw.ts
new file mode 100644
index 000000000..d4a47e724
--- /dev/null
+++ b/examples/readers/src/pdf_fw.ts
@@ -0,0 +1,36 @@
+import {
+  FireworksEmbedding,
+  FireworksLLM,
+  VectorStoreIndex,
+  serviceContextFromDefaults,
+} from "llamaindex";
+import { PDFReader } from "llamaindex/readers/PDFReader";
+
+const embedModel = new FireworksEmbedding({
+  model: "nomic-ai/nomic-embed-text-v1.5",
+});
+
+const llm = new FireworksLLM({
+  model: "accounts/fireworks/models/mixtral-8x7b-instruct",
+});
+
+const serviceContext = serviceContextFromDefaults({ llm, embedModel });
+
+async function main() {
+  // Load PDF
+  const reader = new PDFReader();
+  const documents = await reader.loadData("../data/brk-2022.pdf");
+
+  // Split text and create embeddings. Store them in a VectorStoreIndex
+  const index = await VectorStoreIndex.fromDocuments(documents, {
+    serviceContext,
+  });
+
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+  const response = await queryEngine.query({
+    query: "What mistakes did Warren E. Buffett make?",
+  });
+
+  // Output response
+  console.log(response.toString());
+}
+
+main().catch(console.error);
diff --git a/examples/readers/src/pdf_fw_openai.ts b/examples/readers/src/pdf_fw_openai.ts
new file mode 100644
index 000000000..2910deb25
--- /dev/null
+++ b/examples/readers/src/pdf_fw_openai.ts
@@ -0,0 +1,36 @@
+import {
+  OpenAI,
+  OpenAIEmbedding,
+  VectorStoreIndex,
+  serviceContextFromDefaults,
+} from "llamaindex";
+import { PDFReader } from "llamaindex/readers/PDFReader";
+
+// Use the stock OpenAI classes against the Fireworks OpenAI-compatible endpoint.
+const FIREWORKS_BASE_URL = "https://api.fireworks.ai/inference/v1";
+
+const embedModel = new OpenAIEmbedding({
+  model: "nomic-ai/nomic-embed-text-v1.5",
+  apiKey: process.env.FIREWORKS_API_KEY,
+  additionalSessionOptions: { baseURL: FIREWORKS_BASE_URL },
+});
+
+const llm = new OpenAI({
+  model: "accounts/fireworks/models/mixtral-8x7b-instruct",
+  apiKey: process.env.FIREWORKS_API_KEY,
+  additionalSessionOptions: { baseURL: FIREWORKS_BASE_URL },
+});
+
+const serviceContext = serviceContextFromDefaults({ llm, embedModel });
+
+async function main() {
+  // Load PDF
+  const reader = new PDFReader();
+  const documents = await reader.loadData("../data/brk-2022.pdf");
+
+  // Split text and create embeddings. Store them in a VectorStoreIndex
+  const index = await VectorStoreIndex.fromDocuments(documents, {
+    serviceContext,
+  });
+
+  // Query the index
+  const queryEngine = index.asQueryEngine();
+  const response = await queryEngine.query({
+    query: "What mistakes did Warren E. Buffett make?",
+  });
+
+  // Output response
+  console.log(response.toString());
+}
+
+main().catch(console.error);
diff --git a/packages/core/src/embeddings/fireworks.ts b/packages/core/src/embeddings/fireworks.ts
new file mode 100644
index 000000000..a2abfa131
--- /dev/null
+++ b/packages/core/src/embeddings/fireworks.ts
@@ -0,0 +1,27 @@
+import { OpenAIEmbedding } from "./OpenAIEmbedding";
+
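+/**
+ * Embedding model served by Fireworks.ai. Reuses the OpenAI embedding client,
+ * pointed at the Fireworks inference endpoint, with `nomic-ai/nomic-embed-text-v1.5`
+ * as the default model. Reads the API key from `FIREWORKS_API_KEY` if not provided.
+ */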
+export class FireworksEmbedding extends OpenAIEmbedding {
+  constructor(init?: Partial<OpenAIEmbedding>) {
+    const {
+      apiKey = process.env.FIREWORKS_API_KEY,
+      additionalSessionOptions = {},
+      model = "nomic-ai/nomic-embed-text-v1.5",
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set Fireworks Key in FIREWORKS_API_KEY env variable");
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ??
+      "https://api.fireworks.ai/inference/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/core/src/embeddings/index.ts b/packages/core/src/embeddings/index.ts
index d106de31b..4660233c0 100644
--- a/packages/core/src/embeddings/index.ts
+++ b/packages/core/src/embeddings/index.ts
@@ -4,6 +4,7 @@ export * from "./MistralAIEmbedding";
 export * from "./MultiModalEmbedding";
 export { OllamaEmbedding } from "./OllamaEmbedding";
 export * from "./OpenAIEmbedding";
+export { FireworksEmbedding } from "./fireworks";
 export { TogetherEmbedding } from "./together";
 export * from "./types";
 export * from "./utils";
diff --git a/packages/core/src/llm/fireworks.ts b/packages/core/src/llm/fireworks.ts
new file mode 100644
index 000000000..89bd0796f
--- /dev/null
+++ b/packages/core/src/llm/fireworks.ts
@@ -0,0 +1,27 @@
+import { OpenAI } from "./LLM";
+
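+/**
+ * Chat LLM served by Fireworks.ai. Reuses the OpenAI client, pointed at the
+ * Fireworks inference endpoint, with Mixtral 8x7B Instruct as the default model.
+ * Reads the API key from `FIREWORKS_API_KEY` if not provided.
+ */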
+export class FireworksLLM extends OpenAI {
+  constructor(init?: Partial<OpenAI>) {
+    const {
+      apiKey = process.env.FIREWORKS_API_KEY,
+      additionalSessionOptions = {},
+      model = "accounts/fireworks/models/mixtral-8x7b-instruct",
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set Fireworks API Key in FIREWORKS_AI_KEY env variable");
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ??
+      "https://api.fireworks.ai/inference/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/core/src/llm/index.ts b/packages/core/src/llm/index.ts
index 16dcd5353..6b62b740c 100644
--- a/packages/core/src/llm/index.ts
+++ b/packages/core/src/llm/index.ts
@@ -1,4 +1,5 @@
 export * from "./LLM";
+export { FireworksLLM } from "./fireworks";
 export {
   ALL_AVAILABLE_MISTRAL_MODELS,
   MistralAI,
diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js
index 6920e9db1..8232a9ae8 100644
--- a/packages/eslint-config-custom/index.js
+++ b/packages/eslint-config-custom/index.js
@@ -12,6 +12,7 @@ module.exports = {
           "ANTHROPIC_API_KEY",
           "ASSEMBLYAI_API_KEY",
           "TOGETHER_API_KEY",
+          "FIREWORKS_API_KEY",
 
           "ASTRA_DB_APPLICATION_TOKEN",
           "ASTRA_DB_ENDPOINT",
-- 
GitLab