From 086b94038e1362c60256fadf61db05cb95fd31cf Mon Sep 17 00:00:00 2001
From: Erik <erik.balfe@protonmail.com>
Date: Thu, 25 Jul 2024 22:15:34 +0400
Subject: [PATCH] feat: add DeepSeek LLM class and documentation (#1071)

---
 .changeset/cold-panthers-lie.md               |  6 +++
 apps/docs/.changeset/tall-parents-add.md      |  6 +++
 .../modules/llms/available_llms/deepseek.md   | 50 +++++++++++++++++++
 packages/llamaindex/src/llm/deepseek.ts       | 35 +++++++++++++
 packages/llamaindex/src/llm/index.ts          |  1 +
 5 files changed, 98 insertions(+)
 create mode 100644 .changeset/cold-panthers-lie.md
 create mode 100644 apps/docs/.changeset/tall-parents-add.md
 create mode 100644 apps/docs/docs/modules/llms/available_llms/deepseek.md
 create mode 100644 packages/llamaindex/src/llm/deepseek.ts

diff --git a/.changeset/cold-panthers-lie.md b/.changeset/cold-panthers-lie.md
new file mode 100644
index 000000000..0c6d808c6
--- /dev/null
+++ b/.changeset/cold-panthers-lie.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": minor
+"docs": minor
+---
+
+add DeepSeek LLM class and documentation
diff --git a/apps/docs/.changeset/tall-parents-add.md b/apps/docs/.changeset/tall-parents-add.md
new file mode 100644
index 000000000..8c7582ea5
--- /dev/null
+++ b/apps/docs/.changeset/tall-parents-add.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": minor
+"docs": minor
+---
+
+Add DeepSeek LLM class
diff --git a/apps/docs/docs/modules/llms/available_llms/deepseek.md b/apps/docs/docs/modules/llms/available_llms/deepseek.md
new file mode 100644
index 000000000..83c0c0cfe
--- /dev/null
+++ b/apps/docs/docs/modules/llms/available_llms/deepseek.md
@@ -0,0 +1,50 @@
+# DeepSeek LLM
+
+## Usage
+
+```ts
+import { DeepSeekLLM, Settings } from "llamaindex";
+
+Settings.llm = new DeepSeekLLM({
+  apiKey: "<YOUR_API_KEY>",
+  model: "deepseek-coder", // or "deepseek-chat"
+});
+```
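+
+The constructor also reads the `DEEPSEEK_API_KEY` environment variable, so the key can be omitted from the options. A minimal sketch, assuming the variable is set in your shell (the model defaults to `deepseek-coder`):
+
+```ts
+import { DeepSeekLLM, Settings } from "llamaindex";
+
+// Picks up the API key from the DEEPSEEK_API_KEY environment variable
+Settings.llm = new DeepSeekLLM({ model: "deepseek-chat" });
+```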
+
+## Example
+
+```ts
+import { DeepSeekLLM } from "llamaindex";
+
+const deepseekLlm = new DeepSeekLLM({
+  apiKey: "<YOUR_API_KEY>",
+  model: "deepseek-coder", // or "deepseek-chat"
+});
+
+async function main() {
+  const response = await deepseekLlm.chat({
+    messages: [
+      {
+        role: "system",
+        content: "You are an AI assistant",
+      },
+      {
+        role: "user",
+        content: "Tell me about San Francisco",
+      },
+    ],
+    stream: false,
+  });
+  console.log(response);
+}
+
+main().catch(console.error);
+```
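+
+The same `chat` call can stream tokens instead of returning a single response. A minimal sketch, assuming the standard llamaindex streaming interface where `stream: true` resolves to an async iterable of chunks carrying a `delta` string:
+
+```ts
+import { DeepSeekLLM } from "llamaindex";
+
+const deepseekLlm = new DeepSeekLLM({
+  apiKey: "<YOUR_API_KEY>",
+  model: "deepseek-chat",
+});
+
+async function main() {
+  // stream: true yields chunks as they arrive instead of a single response
+  const stream = await deepseekLlm.chat({
+    messages: [{ role: "user", content: "Tell me about San Francisco" }],
+    stream: true,
+  });
+  for await (const chunk of stream) {
+    process.stdout.write(chunk.delta);
+  }
+}
+
+main().catch(console.error);
+```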
+
+## Limitations
+
+Function calling is not currently supported.
+
+[The json-output parameter is not yet supported, although the model is still good at generating JSON.](https://platform.deepseek.com/api-docs/faq#does-your-api-support-json-output)
+
+## API platform
+
+- [DeepSeek platform](https://platform.deepseek.com/)
diff --git a/packages/llamaindex/src/llm/deepseek.ts b/packages/llamaindex/src/llm/deepseek.ts
new file mode 100644
index 000000000..7c4f15466
--- /dev/null
+++ b/packages/llamaindex/src/llm/deepseek.ts
@@ -0,0 +1,35 @@
+import { getEnv } from "@llamaindex/env";
+import { OpenAI } from "./openai.js";
+
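+// Supported DeepSeek models and their context window sizes (in tokens)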
+export const DEEPSEEK_MODELS = {
+  "deepseek-coder": { contextWindow: 128000 },
+  "deepseek-chat": { contextWindow: 128000 },
+};
+
+type DeepSeekModelName = keyof typeof DEEPSEEK_MODELS;
+const DEFAULT_MODEL: DeepSeekModelName = "deepseek-coder";
+
+export class DeepSeekLLM extends OpenAI {
+  constructor(init?: Partial<OpenAI> & { model?: DeepSeekModelName }) {
+    const {
+      apiKey = getEnv("DEEPSEEK_API_KEY"),
+      additionalSessionOptions = {},
+      model = DEFAULT_MODEL,
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Set your DeepSeek API key in the DEEPSEEK_API_KEY environment variable");
+    }
+
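+    // Default to DeepSeek's OpenAI-compatible endpoint unless a baseURL was provided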
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ?? "https://api.deepseek.com/v1";
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+}
diff --git a/packages/llamaindex/src/llm/index.ts b/packages/llamaindex/src/llm/index.ts
index c5e9e30b3..de4dbad21 100644
--- a/packages/llamaindex/src/llm/index.ts
+++ b/packages/llamaindex/src/llm/index.ts
@@ -34,5 +34,6 @@ export {
   ReplicateSession,
 } from "./replicate_ai.js";
 
+export { DeepSeekLLM } from "./deepseek.js";
 export { TogetherLLM } from "./together.js";
 export * from "./types.js";
-- 
GitLab