diff --git a/.changeset/swift-waves-divide.md b/.changeset/swift-waves-divide.md
new file mode 100644
index 0000000000000000000000000000000000000000..7c68a05c717a4b1c1b3e47afcffcb38183df2524
--- /dev/null
+++ b/.changeset/swift-waves-divide.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": patch
+"docs": patch
+---
+
+Add Groq LLM to LlamaIndex
diff --git a/README.md b/README.md
index 37ed02516d9bfe04f443bca1e279d43890b91b07..2c001de9fc6fa81813cbabed201c74e3296ca539 100644
--- a/README.md
+++ b/README.md
@@ -125,6 +125,7 @@ module.exports = nextConfig;
 
 - OpenAI GPT-3.5-turbo and GPT-4
 - Anthropic Claude Instant and Claude 2
+- Groq LLMs
 - Llama2 Chat LLMs (70B, 13B, and 7B parameters)
 - MistralAI Chat LLMs
 - Fireworks Chat LLMs
diff --git a/apps/docs/docs/modules/llms/available_llms/groq.mdx b/apps/docs/docs/modules/llms/available_llms/groq.mdx
new file mode 100644
index 0000000000000000000000000000000000000000..f3862f0c3feba8582fa15d70d0b0f17e02e2960d
--- /dev/null
+++ b/apps/docs/docs/modules/llms/available_llms/groq.mdx
@@ -0,0 +1,56 @@
+import CodeBlock from "@theme/CodeBlock";
+import CodeSource from "!raw-loader!../../../../../../examples/groq.ts";
+
+# Groq
+
+## Usage
+
+First, create an API key at the [Groq Console](https://console.groq.com/keys). Then save it in your environment:
+
+```bash
+export GROQ_API_KEY=<your-api-key>
+```
+
+Then, initialize the Groq module.
+
+```ts
+import { Groq, serviceContextFromDefaults } from "llamaindex";
+
+const groq = new Groq({
+  // If you do not wish to set your API key in the environment, you may
+  // configure your API key when you initialize the Groq class.
+  // apiKey: "<your-api-key>",
+});
+
+const serviceContext = serviceContextFromDefaults({ llm: groq });
+```
+
+## Load and index documents
+
+For this example, we will use a single document. In a real-world scenario, you would have multiple documents to index.
+
+```ts
+const document = new Document({ text: essay, id_: "essay" });
+
+const index = await VectorStoreIndex.fromDocuments([document], {
+  serviceContext,
+});
+```
+
+## Query
+
+```ts
+const queryEngine = index.asQueryEngine();
+
+const query = "What is the meaning of life?";
+
+const results = await queryEngine.query({
+  query,
+});
+```
+
+## Full Example
+
+<CodeBlock language="ts" showLineNumbers>
+  {CodeSource}
+</CodeBlock>
diff --git a/examples/groq.ts b/examples/groq.ts
new file mode 100644
index 0000000000000000000000000000000000000000..581031532a0f462c6329adfb5b47eb2ee42c01b8
--- /dev/null
+++ b/examples/groq.ts
@@ -0,0 +1,48 @@
+import fs from "node:fs/promises";
+
+import {
+  Document,
+  Groq,
+  VectorStoreIndex,
+  serviceContextFromDefaults,
+} from "llamaindex";
+
+async function main() {
+  // Create an instance of the LLM
+  const groq = new Groq({
+    apiKey: process.env.GROQ_API_KEY,
+  });
+
+  // Create a service context
+  const serviceContext = serviceContextFromDefaults({ llm: groq });
+
+  // Load essay from abramov.txt in Node
+  const path = "node_modules/llamaindex/examples/abramov.txt";
+  const essay = await fs.readFile(path, "utf-8");
+  const document = new Document({ text: essay, id_: "essay" });
+
+  // Load and index documents
+  const index = await VectorStoreIndex.fromDocuments([document], {
+    serviceContext,
+  });
+
+  // Get the retriever
+  const retriever = index.asRetriever();
+
+  // Create a query engine
+  const queryEngine = index.asQueryEngine({
+    retriever,
+  });
+
+  const query = "What is the meaning of life?";
+
+  // Query
+  const response = await queryEngine.query({
+    query,
+  });
+
+  // Log the response
+  console.log(response.response);
+}
+
+await main();
diff --git a/packages/core/src/llm/groq.ts b/packages/core/src/llm/groq.ts
new file mode 100644
index 0000000000000000000000000000000000000000..0d287f56d1c9afa08349c9f8113ecd8fd1dd090e
--- /dev/null
+++ b/packages/core/src/llm/groq.ts
@@ -0,0 +1,26 @@
+import { OpenAI } from "./LLM.js";
+
+export class Groq extends OpenAI {
+  constructor(init?: Partial<OpenAI>) {
+    const {
+      apiKey = process.env.GROQ_API_KEY,
+      additionalSessionOptions = {},
+      model = "mixtral-8x7b-32768",
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set Groq Key in GROQ_API_KEY env variable"); // Direct users to GROQ_API_KEY, not OPENAI_API_KEY
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ?? "https://api.groq.com/openai/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/core/src/llm/index.ts b/packages/core/src/llm/index.ts
index 10a2dbc1a8f9a8a231d033ea26c76649b48cda87..20aeb4dfe760daba66ac70335aed6030d18810a3 100644
--- a/packages/core/src/llm/index.ts
+++ b/packages/core/src/llm/index.ts
@@ -1,5 +1,6 @@
 export * from "./LLM.js";
 export { FireworksLLM } from "./fireworks.js";
+export { Groq } from "./groq.js";
 export {
   ALL_AVAILABLE_MISTRAL_MODELS,
   MistralAI,
diff --git a/packages/eslint-config-custom/index.js b/packages/eslint-config-custom/index.js
index 4747b16961c4a083628b905239b8ef43acc2f2d6..c2c6700597a958b9ed7f93c3376b5278daccf875 100644
--- a/packages/eslint-config-custom/index.js
+++ b/packages/eslint-config-custom/index.js
@@ -14,6 +14,7 @@ module.exports = {
           "ASSEMBLYAI_API_KEY",
           "TOGETHER_API_KEY",
           "FIREWORKS_API_KEY",
+          "GROQ_API_KEY",
 
           "ASTRA_DB_APPLICATION_TOKEN",
           "ASTRA_DB_ENDPOINT",