From 09bf27abd7b8fb9a5c07635896b92b4623839826 Mon Sep 17 00:00:00 2001
From: Graden Rea <gradenr@users.noreply.github.com>
Date: Sun, 25 Feb 2024 22:46:27 -0800
Subject: [PATCH] feat: Add Groq LLM integration (#561)

---
 .changeset/swift-waves-divide.md              |  6 ++
 README.md                                     |  1 +
 .../docs/modules/llms/available_llms/groq.mdx | 56 +++++++++++++++++++
 examples/groq.ts                              | 48 ++++++++++++++++
 packages/core/src/llm/groq.ts                 | 26 +++++++++
 packages/core/src/llm/index.ts                |  1 +
 packages/eslint-config-custom/index.js        |  1 +
 7 files changed, 139 insertions(+)
 create mode 100644 .changeset/swift-waves-divide.md
 create mode 100644 apps/docs/docs/modules/llms/available_llms/groq.mdx
 create mode 100644 examples/groq.ts
 create mode 100644 packages/core/src/llm/groq.ts

diff --git a/.changeset/swift-waves-divide.md b/.changeset/swift-waves-divide.md
new file mode 100644
index 000000000..7c68a05c7
--- /dev/null
+++ b/.changeset/swift-waves-divide.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": patch
+"docs": patch
+---
+
+Add Groq LLM to LlamaIndex
diff --git a/README.md b/README.md
index 37ed02516..2c001de9f 100644
--- a/README.md
+++ b/README.md
@@ -125,6 +125,7 @@ module.exports = nextConfig;
 
 - OpenAI GPT-3.5-turbo and GPT-4
 - Anthropic Claude Instant and Claude 2
+- Groq LLMs
 - Llama2 Chat LLMs (70B, 13B, and 7B parameters)
 - MistralAI Chat LLMs
 - Fireworks Chat LLMs
diff --git a/apps/docs/docs/modules/llms/available_llms/groq.mdx b/apps/docs/docs/modules/llms/available_llms/groq.mdx
new file mode 100644
index 000000000..f3862f0c3
--- /dev/null
+++ b/apps/docs/docs/modules/llms/available_llms/groq.mdx
@@ -0,0 +1,56 @@
+import CodeBlock from "@theme/CodeBlock";
+import CodeSource from "!raw-loader!../../../../../../examples/groq.ts";
+
+# Groq
+
+## Usage
+
+First, create an API key at the [Groq Console](https://console.groq.com/keys). Then save it in your environment:
+
+```bash
+export GROQ_API_KEY=<your-api-key>
+```
+
+Then initialize the Groq module.
+
+```ts
+import { Groq, serviceContextFromDefaults } from "llamaindex";
+
+const groq = new Groq({
+  // If you do not wish to set your API key in the environment, you may
+  // configure your API key when you initialize the Groq class.
+  // apiKey: "<your-api-key>",
+});
+
+const serviceContext = serviceContextFromDefaults({ llm: groq });
+```
+
+## Load and index documents
+
+For this example, we will use a single document. In a real-world scenario, you would have multiple documents to index.
+
+```ts
+const document = new Document({ text: essay, id_: "essay" });
+
+const index = await VectorStoreIndex.fromDocuments([document], {
+  serviceContext,
+});
+```
+
+## Query
+
+```ts
+const queryEngine = index.asQueryEngine();
+
+const query = "What is the meaning of life?";
+
+const results = await queryEngine.query({
+  query,
+});
+```
+
+## Full Example
+
+<CodeBlock language="ts" showLineNumbers>
+  {CodeSource}
+</CodeBlock>
diff --git a/examples/groq.ts b/examples/groq.ts
new file mode 100644
index 000000000..581031532
--- /dev/null
+++ b/examples/groq.ts
@@ -0,0 +1,48 @@
+import fs from "node:fs/promises";
+
+import {
+  Document,
+  Groq,
+  VectorStoreIndex,
+  serviceContextFromDefaults,
+} from "llamaindex";
+
+async function main() {
+  // Create an instance of the LLM
+  const groq = new Groq({
+    apiKey: process.env.GROQ_API_KEY,
+  });
+
+  // Create a service context
+  const serviceContext = serviceContextFromDefaults({ llm: groq });
+
+  // Load essay from abramov.txt in Node
+  const path = "node_modules/llamaindex/examples/abramov.txt";
+  const essay = await fs.readFile(path, "utf-8");
+  const document = new Document({ text: essay, id_: "essay" });
+
+  // Load and index documents
+  const index = await VectorStoreIndex.fromDocuments([document], {
+    serviceContext,
+  });
+
+  // get retriever
+  const retriever = index.asRetriever();
+
+  // Create a query engine
+  const queryEngine = index.asQueryEngine({
+    retriever,
+  });
+
+  const query = "What is the meaning of life?";
+
+  // Query
+  const response = await queryEngine.query({
+    query,
+  });
+
+  // Log the response
+  console.log(response.response);
+}
+
+await main();
diff --git a/packages/core/src/llm/groq.ts b/packages/core/src/llm/groq.ts
new file mode 100644
index 000000000..0d287f56d
--- /dev/null
+++ b/packages/core/src/llm/groq.ts
@@ -0,0 +1,26 @@
+import { OpenAI } from "./LLM.js";
+
+export class Groq extends OpenAI {
+  constructor(init?: Partial<OpenAI>) {
+    const {
+      apiKey = process.env.GROQ_API_KEY,
+      additionalSessionOptions = {},
+      model = "mixtral-8x7b-32768",
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set Groq Key in GROQ_API_KEY env variable"); // Tell user to set correct env variable, and not OPENAI_API_KEY
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ?? "https://api.groq.com/openai/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/core/src/llm/index.ts b/packages/core/src/llm/index.ts
index 10a2dbc1a..20aeb4dfe 100644
--- a/packages/core/src/llm/index.ts
+++ b/packages/core/src/llm/index.ts
@@ -1,5 +1,6 @@
 export * from "./LLM.js";
 export { FireworksLLM } from "./fireworks.js";
+export { Groq } from "./groq.js";
 export {
   ALL_AVAILABLE_MISTRAL_MODELS,
   MistralAI,
diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js
index 4747b1696..c2c670059 100644
--- a/packages/eslint-config-custom/index.js
+++ b/packages/eslint-config-custom/index.js
@@ -14,6 +14,7 @@ module.exports = {
           "ASSEMBLYAI_API_KEY",
           "TOGETHER_API_KEY",
           "FIREWORKS_API_KEY",
+          "GROQ_API_KEY",
 
           "ASTRA_DB_APPLICATION_TOKEN",
           "ASTRA_DB_ENDPOINT",
-- 
GitLab