Skip to content
Snippets Groups Projects
Unverified Commit 1587e48a authored by ANKIT VARSHNEY's avatar ANKIT VARSHNEY Committed by GitHub
Browse files

Feat/perplexity (#1719)

parent bd239aaf
No related branches found
No related tags found
No related merge requests found
---
"@llamaindex/perplexity": patch
"@llamaindex/examples": patch
---
Added support for the Perplexity API
......@@ -48,6 +48,7 @@
"@llamaindex/fireworks": "^0.0.4",
"@llamaindex/together": "^0.0.4",
"@llamaindex/jinaai": "^0.0.4",
"@llamaindex/perplexity": "^0.0.1",
"@notionhq/client": "^2.2.15",
"@pinecone-database/pinecone": "^4.0.0",
"@vercel/postgres": "^0.10.0",
......
import { perplexity } from "@llamaindex/perplexity";
/**
 * Example: chat and streaming completions against the Perplexity API.
 * Requires PERPLEXITY_API_KEY to be set in the environment.
 */
(async () => {
  const perplexityLLM = perplexity({
    apiKey: process.env.PERPLEXITY_API_KEY!,
    model: "sonar",
  });

  // Chat API example — single, non-streaming completion.
  const response = await perplexityLLM.chat({
    messages: [
      {
        role: "system",
        content:
          "You are a helpful AI assistant that provides accurate and concise answers",
      },
      {
        role: "user",
        content: "What is the capital of France?",
      },
    ],
  });
  console.log("Chat response:", response.message.content);

  // Streaming example — chunks arrive incrementally via an async iterator.
  const stream = await perplexityLLM.chat({
    messages: [
      {
        role: "system",
        content: "You are a creative AI assistant that tells engaging stories",
      },
      {
        role: "user",
        content: "Tell me a short story",
      },
    ],
    stream: true,
  });
  console.log("\nStreaming response:");
  for await (const chunk of stream) {
    process.stdout.write(chunk.delta);
  }
})().catch((err: unknown) => {
  // Without this, any API failure is a silent unhandled rejection on a
  // floating promise; surface it and signal failure to the shell.
  console.error(err);
  process.exitCode = 1;
});
{
"name": "@llamaindex/perplexity",
"description": "Perplexity Adapter for LlamaIndex",
"version": "0.0.1",
"type": "module",
"main": "./dist/index.cjs",
"module": "./dist/index.js",
"exports": {
".": {
"require": {
"types": "./dist/index.d.cts",
"default": "./dist/index.cjs"
},
"import": {
"types": "./dist/index.d.ts",
"default": "./dist/index.js"
}
}
},
"files": [
"dist"
],
"scripts": {
"build": "bunchee",
"dev": "bunchee --watch"
},
"devDependencies": {
"bunchee": "6.4.0"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"@llamaindex/core": "workspace:*",
"@llamaindex/env": "workspace:*",
"@llamaindex/openai": "workspace:*"
}
}
export * from "./llm";
import { getEnv } from "@llamaindex/env";
import { Tokenizers } from "@llamaindex/env/tokenizers";
import { OpenAI } from "@llamaindex/openai";
/**
 * Supported Perplexity models mapped to their context window sizes (in tokens).
 *
 * `as const` keeps the keys as literal types (so `PerplexityModelName` is a
 * precise string-literal union) and makes the table readonly; `satisfies`
 * validates every entry's shape without widening the inferred type.
 */
export const PERPLEXITY_MODELS = {
  "sonar-deep-research": {
    contextWindow: 128000,
  },
  "sonar-reasoning-pro": {
    contextWindow: 128000,
  },
  "sonar-reasoning": {
    contextWindow: 128000,
  },
  "sonar-pro": {
    contextWindow: 200000,
  },
  sonar: {
    contextWindow: 128000,
  },
  "r1-1776": {
    contextWindow: 128000,
  },
} as const satisfies Record<string, { contextWindow: number }>;

/** Union of all supported Perplexity model identifiers. */
type PerplexityModelName = keyof typeof PERPLEXITY_MODELS;

/** Model used when the caller does not specify one. */
const DEFAULT_MODEL: PerplexityModelName = "sonar";
/**
 * LLM adapter for the Perplexity chat-completions API.
 *
 * Perplexity exposes an OpenAI-compatible endpoint, so this class reuses the
 * OpenAI client with the Perplexity base URL and model catalog.
 */
export class Perplexity extends OpenAI {
  /**
   * @param init - Optional OpenAI-style options plus a Perplexity model name.
   *   The API key falls back to the PERPLEXITY_API_KEY environment variable.
   * @throws Error when no API key is provided or found in the environment.
   */
  constructor(
    init?: Omit<Partial<OpenAI>, "session"> & { model?: PerplexityModelName },
  ) {
    const {
      apiKey = getEnv("PERPLEXITY_API_KEY"),
      additionalSessionOptions = {},
      model = DEFAULT_MODEL,
      ...rest
    } = init ?? {};
    if (!apiKey) {
      throw new Error("Perplexity API key is required");
    }
    // Build a fresh options object rather than assigning onto the
    // caller-supplied one, so we never mutate an object the caller owns.
    const sessionOptions = {
      ...additionalSessionOptions,
      baseURL: additionalSessionOptions.baseURL ?? "https://api.perplexity.ai/",
    };
    super({
      apiKey,
      additionalSessionOptions: sessionOptions,
      model,
      ...rest,
    });
  }

  /** Perplexity's API does not support tool/function calling. */
  get supportToolCall() {
    return false;
  }

  /** Model metadata; context window is looked up from the model catalog. */
  get metadata() {
    return {
      model: this.model,
      temperature: this.temperature,
      topP: this.topP,
      contextWindow:
        PERPLEXITY_MODELS[this.model as PerplexityModelName]?.contextWindow,
      // NOTE(review): assumes Perplexity models use a cl100k-compatible
      // tokenizer for token counting — confirm against provider docs.
      tokenizer: Tokenizers.CL100K_BASE,
    };
  }
}
/**
 * Factory helper for constructing a {@link Perplexity} LLM.
 *
 * @param init - Optional initialization parameters forwarded verbatim to the
 *   {@link Perplexity} constructor.
 * @returns A newly constructed Perplexity instance.
 */
export const perplexity = (
  init?: ConstructorParameters<typeof Perplexity>[0],
) => {
  return new Perplexity(init);
};
{
"extends": "../../../tsconfig.json",
"compilerOptions": {
"target": "ESNext",
"module": "ESNext",
"moduleResolution": "bundler",
"outDir": "./lib",
"tsBuildInfoFile": "./lib/.tsbuildinfo"
},
"include": ["./src", "package.json"],
"references": [
{
"path": "../../core/tsconfig.json"
},
{
"path": "../../env/tsconfig.json"
}
]
}
......@@ -656,6 +656,9 @@ importers:
'@llamaindex/openai':
specifier: ^0.1.60
version: link:../packages/providers/openai
'@llamaindex/perplexity':
specifier: ^0.0.1
version: link:../packages/providers/perplexity
'@llamaindex/pinecone':
specifier: ^0.0.13
version: link:../packages/providers/storage/pinecone
......@@ -1315,6 +1318,22 @@ importers:
specifier: 6.4.0
version: 6.4.0(typescript@5.7.3)
 
packages/providers/perplexity:
dependencies:
'@llamaindex/core':
specifier: workspace:*
version: link:../../core
'@llamaindex/env':
specifier: workspace:*
version: link:../../env
'@llamaindex/openai':
specifier: workspace:*
version: link:../openai
devDependencies:
bunchee:
specifier: 6.4.0
version: 6.4.0(typescript@5.7.3)
packages/providers/portkey-ai:
dependencies:
'@llamaindex/core':
......
......@@ -187,6 +187,9 @@
},
{
"path": "./packages/providers/jinaai/tsconfig.json"
},
{
"path": "./packages/providers/perplexity/tsconfig.json"
}
]
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment