diff --git a/.changeset/cold-panthers-lie.md b/.changeset/cold-panthers-lie.md
new file mode 100644
index 0000000000000000000000000000000000000000..0c6d808c6675da77d88327622dff668a4c0a5df2
--- /dev/null
+++ b/.changeset/cold-panthers-lie.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": minor
+"docs": minor
+---
+
+add DeepSeek LLM class and documentation
diff --git a/apps/docs/.changeset/tall-parents-add.md b/apps/docs/.changeset/tall-parents-add.md
new file mode 100644
index 0000000000000000000000000000000000000000..8c7582ea59fef1eb37eed0ba6f304fae562a6e1c
--- /dev/null
+++ b/apps/docs/.changeset/tall-parents-add.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": minor
+"docs": minor
+---
+
+Add DeepSeek LLM class
diff --git a/apps/docs/docs/modules/llms/available_llms/deepseek.md b/apps/docs/docs/modules/llms/available_llms/deepseek.md
new file mode 100644
index 0000000000000000000000000000000000000000..83c0c0cfe32c9af7a41bb0c6030773f3201ef136
--- /dev/null
+++ b/apps/docs/docs/modules/llms/available_llms/deepseek.md
@@ -0,0 +1,50 @@
+# DeepSeek LLM
+
+## Usage
+
+```ts
+import { DeepSeekLLM, Settings } from "llamaindex";
+
+Settings.llm = new DeepSeekLLM({
+  apiKey: "<YOUR_API_KEY>",
+  model: "deepseek-coder", // or "deepseek-chat"
+});
+```
+
+## Example
+
+```ts
+import { DeepSeekLLM, Document, VectorStoreIndex, Settings } from "llamaindex";
+
+const deepseekLlm = new DeepSeekLLM({
+  apiKey: "<YOUR_API_KEY>",
+  model: "deepseek-coder", // or "deepseek-chat"
+});
+
+async function main() {
+  const response = await deepseekLlm.chat({
+    messages: [
+      {
+        role: "system",
+        content: "You are an AI assistant",
+      },
+      {
+        role: "user",
+        content: "Tell me about San Francisco",
+      },
+    ],
+    stream: false,
+  });
+  console.log(response);
+}
+```
+
+## Limitations
+
+Currently does not support function calling.
+
+[Currently does not support the json-output parameter, although the model is still very good at generating JSON.](https://platform.deepseek.com/api-docs/faq#does-your-api-support-json-output)
+
+## API platform
+
+- [DeepSeek platform](https://platform.deepseek.com/)
diff --git a/packages/llamaindex/src/llm/deepseek.ts b/packages/llamaindex/src/llm/deepseek.ts
new file mode 100644
index 0000000000000000000000000000000000000000..7c4f15466df2d81192197a61a96b33dcdc1d10b2
--- /dev/null
+++ b/packages/llamaindex/src/llm/deepseek.ts
@@ -0,0 +1,47 @@
+import { getEnv } from "@llamaindex/env";
+import { OpenAI } from "./openai.js";
+
+// Known DeepSeek models and their context windows.
+export const DEEPSEEK_MODELS = {
+  "deepseek-coder": { contextWindow: 128000 },
+  "deepseek-chat": { contextWindow: 128000 },
+};
+
+type DeepSeekModelName = keyof typeof DEEPSEEK_MODELS;
+const DEFAULT_MODEL: DeepSeekModelName = "deepseek-coder";
+
+/**
+ * DeepSeek LLM — a thin wrapper around the OpenAI client pointed at
+ * DeepSeek's OpenAI-compatible endpoint.
+ *
+ * The API key is taken from `init.apiKey` or the `DEEPSEEK_API_KEY`
+ * environment variable; an error is thrown when neither is provided.
+ */
+export class DeepSeekLLM extends OpenAI {
+  constructor(init?: Partial<OpenAI> & { model?: DeepSeekModelName }) {
+    const {
+      apiKey = getEnv("DEEPSEEK_API_KEY"),
+      additionalSessionOptions = {},
+      model = DEFAULT_MODEL,
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set DeepSeek Key in DEEPSEEK_API_KEY env variable");
+    }
+
+    // Copy the session options rather than mutating the caller's object;
+    // default to DeepSeek's OpenAI-compatible base URL.
+    const sessionOptions = {
+      ...additionalSessionOptions,
+      baseURL:
+        additionalSessionOptions.baseURL ?? "https://api.deepseek.com/v1",
+    };
+
+    super({
+      apiKey,
+      additionalSessionOptions: sessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/llamaindex/src/llm/index.ts b/packages/llamaindex/src/llm/index.ts
index c5e9e30b3c1c8e1191eed2953903fabf1fadd104..de4dbad211b0b58c4979935c6f76cb6bca3c76a9 100644
--- a/packages/llamaindex/src/llm/index.ts
+++ b/packages/llamaindex/src/llm/index.ts
@@ -34,5 +34,6 @@ export {
   ReplicateSession,
 } from "./replicate_ai.js";
 
+export { DeepSeekLLM } from "./deepseek.js";
 export { TogetherLLM } from "./together.js";
 export * from "./types.js";