import { perplexity } from "@llamaindex/perplexity";
(async () => {
  // Fail fast with a clear message instead of relying on the TS-only `!`
  // assertion (which passes `undefined` through at runtime when unset).
  const apiKey = process.env.PERPLEXITY_API_KEY;
  if (!apiKey) {
    throw new Error("Missing PERPLEXITY_API_KEY environment variable");
  }

  // Perplexity-backed LLM client using the "sonar" model.
  const perplexityLLM = perplexity({
    apiKey,
    model: "sonar",
  });

  // --- Chat API example (non-streaming) ---
  const response = await perplexityLLM.chat({
    messages: [
      {
        role: "system",
        content:
          "You are a helpful AI assistant that provides accurate and concise answers",
      },
      {
        role: "user",
        content: "What is the capital of France?",
      },
    ],
  });
  console.log("Chat response:", response.message.content);

  // --- Streaming example: `stream: true` yields an async iterable of deltas ---
  const stream = await perplexityLLM.chat({
    messages: [
      {
        role: "system",
        content: "You are a creative AI assistant that tells engaging stories",
      },
      {
        role: "user",
        content: "Tell me a short story",
      },
    ],
    stream: true,
  });
  console.log("\nStreaming response:");
  for await (const chunk of stream) {
    process.stdout.write(chunk.delta);
  }
})().catch((err) => {
  // Surface failures (network, auth, quota) instead of leaving the IIFE as
  // a floating promise whose rejection is unhandled.
  console.error("Perplexity example failed:", err);
  process.exitCode = 1;
});