From 1587e48a141bb31f1ff790465248a188cbe0a14e Mon Sep 17 00:00:00 2001
From: ANKIT VARSHNEY <132201033+AVtheking@users.noreply.github.com>
Date: Tue, 11 Mar 2025 15:51:57 +0530
Subject: [PATCH] Feat/perplexity (#1719)

---
 .changeset/rude-swans-fetch.md              |  6 ++
 examples/package.json                       |  1 +
 examples/perplexity.ts                      | 44 ++++++++++++
 packages/providers/perplexity/package.json  | 38 ++++++++++
 packages/providers/perplexity/src/index.ts  |  1 +
 packages/providers/perplexity/src/llm.ts    | 78 +++++++++++++++++++++
 packages/providers/perplexity/tsconfig.json | 19 +++++
 pnpm-lock.yaml                              | 19 +++++
 tsconfig.json                               |  3 +
 9 files changed, 209 insertions(+)
 create mode 100644 .changeset/rude-swans-fetch.md
 create mode 100644 examples/perplexity.ts
 create mode 100644 packages/providers/perplexity/package.json
 create mode 100644 packages/providers/perplexity/src/index.ts
 create mode 100644 packages/providers/perplexity/src/llm.ts
 create mode 100644 packages/providers/perplexity/tsconfig.json

diff --git a/.changeset/rude-swans-fetch.md b/.changeset/rude-swans-fetch.md
new file mode 100644
index 000000000..61fb0857f
--- /dev/null
+++ b/.changeset/rude-swans-fetch.md
@@ -0,0 +1,6 @@
+---
+"@llamaindex/perplexity": patch
+"@llamaindex/examples": patch
+---
+
+Added support for the Perplexity API
diff --git a/examples/package.json b/examples/package.json
index e930dce8d..1497616c9 100644
--- a/examples/package.json
+++ b/examples/package.json
@@ -48,6 +48,7 @@
     "@llamaindex/fireworks": "^0.0.4",
     "@llamaindex/together": "^0.0.4",
     "@llamaindex/jinaai": "^0.0.4",
+    "@llamaindex/perplexity": "^0.0.1",
     "@notionhq/client": "^2.2.15",
     "@pinecone-database/pinecone": "^4.0.0",
     "@vercel/postgres": "^0.10.0",
diff --git a/examples/perplexity.ts b/examples/perplexity.ts
new file mode 100644
index 000000000..1df63f63c
--- /dev/null
+++ b/examples/perplexity.ts
@@ -0,0 +1,44 @@
+import { perplexity } from "@llamaindex/perplexity";
+
+(async () => { // demo: one blocking chat call, then one streaming chat call
+  const perplexityLLM = perplexity({
+    apiKey: process.env.PERPLEXITY_API_KEY!, // '!' only silences TS; a missing key throws in the constructor
+    model: "sonar",
+  });
+
+  // Blocking chat example: resolves once the full completion is available
+  const response = await perplexityLLM.chat({
+    messages: [
+      {
+        role: "system",
+        content:
+          "You are a helpful AI assistant that provides accurate and concise answers",
+      },
+      {
+        role: "user",
+        content: "What is the capital of France?",
+      },
+    ],
+  });
+  console.log("Chat response:", response.message.content);
+
+  // Streaming example: with stream: true, chat() resolves to an async iterable
+  const stream = await perplexityLLM.chat({
+    messages: [
+      {
+        role: "system",
+        content: "You are a creative AI assistant that tells engaging stories",
+      },
+      {
+        role: "user",
+        content: "Tell me a short story",
+      },
+    ],
+    stream: true, // switches the return type from a single response to a chunk stream
+  });
+
+  console.log("\nStreaming response:");
+  for await (const chunk of stream) {
+    process.stdout.write(chunk.delta); // write without newline so chunks join seamlessly
+  }
+})(); // NOTE(review): rejections from this IIFE are unhandled; consider appending .catch(console.error)
diff --git a/packages/providers/perplexity/package.json b/packages/providers/perplexity/package.json
new file mode 100644
index 000000000..044e9f2aa
--- /dev/null
+++ b/packages/providers/perplexity/package.json
@@ -0,0 +1,38 @@
+{
+  "name": "@llamaindex/perplexity",
+  "description": "Perplexity Adapter for LlamaIndex",
+  "version": "0.0.1",
+  "type": "module",
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "exports": {
+    ".": {
+      "require": {
+        "types": "./dist/index.d.cts",
+        "default": "./dist/index.cjs"
+      },
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "bunchee",
+    "dev": "bunchee --watch"
+  },
+  "devDependencies": {
+    "bunchee": "6.4.0"
+  },
+  "keywords": [],
+  "author": "",
+  "license": "ISC",
+  "dependencies": {
+    "@llamaindex/core": "workspace:*",
+    "@llamaindex/env": "workspace:*",
+    "@llamaindex/openai": "workspace:*"
+  }
+}
diff --git a/packages/providers/perplexity/src/index.ts b/packages/providers/perplexity/src/index.ts
new file mode 100644
index 000000000..e6679f111
--- /dev/null
+++ b/packages/providers/perplexity/src/index.ts
@@ -0,0 +1 @@
+export * from "./llm";
diff --git a/packages/providers/perplexity/src/llm.ts b/packages/providers/perplexity/src/llm.ts
new file mode 100644
index 000000000..bf3fc7e01
--- /dev/null
+++ b/packages/providers/perplexity/src/llm.ts
@@ -0,0 +1,78 @@
+import { getEnv } from "@llamaindex/env";
+import { Tokenizers } from "@llamaindex/env/tokenizers";
+import { OpenAI } from "@llamaindex/openai";
+
+export const PERPLEXITY_MODELS = { // supported Perplexity models and their context-window sizes (tokens)
+  "sonar-deep-research": {
+    contextWindow: 128000,
+  },
+  "sonar-reasoning-pro": {
+    contextWindow: 128000,
+  },
+  "sonar-reasoning": {
+    contextWindow: 128000,
+  },
+  "sonar-pro": {
+    contextWindow: 200000,
+  },
+  sonar: {
+    contextWindow: 128000,
+  },
+  "r1-1776": {
+    contextWindow: 128000,
+  },
+};
+
+type PerplexityModelName = keyof typeof PERPLEXITY_MODELS; // union of the literal keys above
+const DEFAULT_MODEL: PerplexityModelName = "sonar"; // used when the caller passes no model
+
+export class Perplexity extends OpenAI { // Perplexity's API is OpenAI-compatible, so the OpenAI client is reused
+  constructor(
+    init?: Omit<Partial<OpenAI>, "session"> & { model?: PerplexityModelName }, // "session" is excluded so callers cannot inject one
+  ) {
+    const {
+      apiKey = getEnv("PERPLEXITY_API_KEY"), // falls back to the env var when no key is passed
+      additionalSessionOptions = {},
+      model = DEFAULT_MODEL,
+      ...rest
+    } = init ?? {};
+
+    if (!apiKey) {
+      throw new Error("Perplexity API key is required"); // fail fast rather than at first request
+    }
+
+    additionalSessionOptions.baseURL =
+      additionalSessionOptions.baseURL ?? "https://api.perplexity.ai/"; // point the OpenAI client at Perplexity unless overridden
+
+    super({
+      apiKey,
+      additionalSessionOptions,
+      model,
+      ...rest,
+    });
+  }
+
+  get supportToolCall() { // Perplexity endpoints do not offer OpenAI-style tool calling
+    return false;
+  }
+
+  get metadata() { // overrides base metadata with Perplexity-specific context windows
+    return {
+      model: this.model,
+      temperature: this.temperature,
+      topP: this.topP,
+      contextWindow:
+        PERPLEXITY_MODELS[this.model as PerplexityModelName]?.contextWindow, // undefined for unknown model ids — TODO confirm callers tolerate this
+      tokenizer: Tokenizers.CL100K_BASE, // assumes cl100k_base tokenization — NOTE(review): confirm for sonar models
+    };
+  }
+}
+
+/**
+ * Convenience function to create a new Perplexity instance.
+ * @param init - Optional initialization parameters for the Perplexity instance.
+ * @returns A new Perplexity instance.
+ */
+export const perplexity = (
+  init?: ConstructorParameters<typeof Perplexity>[0], // derived from the constructor so the two never drift apart
+) => new Perplexity(init);
diff --git a/packages/providers/perplexity/tsconfig.json b/packages/providers/perplexity/tsconfig.json
new file mode 100644
index 000000000..7c73f1cf8
--- /dev/null
+++ b/packages/providers/perplexity/tsconfig.json
@@ -0,0 +1,19 @@
+{
+  "extends": "../../../tsconfig.json",
+  "compilerOptions": {
+    "target": "ESNext",
+    "module": "ESNext",
+    "moduleResolution": "bundler",
+    "outDir": "./lib",
+    "tsBuildInfoFile": "./lib/.tsbuildinfo"
+  },
+  "include": ["./src", "package.json"],
+  "references": [
+    {
+      "path": "../../core/tsconfig.json"
+    },
+    {
+      "path": "../../env/tsconfig.json"
+    }
+  ]
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index e5a6b35e9..a42c6bbc5 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -656,6 +656,9 @@ importers:
       '@llamaindex/openai':
         specifier: ^0.1.60
         version: link:../packages/providers/openai
+      '@llamaindex/perplexity':
+        specifier: ^0.0.1
+        version: link:../packages/providers/perplexity
       '@llamaindex/pinecone':
         specifier: ^0.0.13
         version: link:../packages/providers/storage/pinecone
@@ -1315,6 +1318,22 @@ importers:
         specifier: 6.4.0
         version: 6.4.0(typescript@5.7.3)
 
+  packages/providers/perplexity:
+    dependencies:
+      '@llamaindex/core':
+        specifier: workspace:*
+        version: link:../../core
+      '@llamaindex/env':
+        specifier: workspace:*
+        version: link:../../env
+      '@llamaindex/openai':
+        specifier: workspace:*
+        version: link:../openai
+    devDependencies:
+      bunchee:
+        specifier: 6.4.0
+        version: 6.4.0(typescript@5.7.3)
+
   packages/providers/portkey-ai:
     dependencies:
       '@llamaindex/core':
diff --git a/tsconfig.json b/tsconfig.json
index 8af5db8f3..0dbe6a061 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -187,6 +187,9 @@
     },
     {
       "path": "./packages/providers/jinaai/tsconfig.json"
+    },
+    {
+      "path": "./packages/providers/perplexity/tsconfig.json"
     }
   ]
 }
-- 
GitLab