diff --git a/apps/simple/llmStream.ts b/apps/simple/llmStream.ts
index a4c3c80e3a52a3aaa7f047eaced5423d0e7d7d6b..e9ebce4441cb2849d534947061114999a5e3a980 100644
--- a/apps/simple/llmStream.ts
+++ b/apps/simple/llmStream.ts
@@ -1,5 +1,5 @@
-import {OpenAI, Anthropic, ChatMessage, SimpleChatEngine } from "llamaindex";
-// import {Anthropic} from '@anthropic-ai/sdk';
+import { ChatMessage, OpenAI, SimpleChatEngine } from "llamaindex";
+import {Anthropic} from "../../packages/core/src/llm/LLM";
 import { stdin as input, stdout as output } from "node:process";
 import readline from "node:readline/promises";
 
@@ -9,12 +9,9 @@ Where is Istanbul?
 `;
 
   // const llm = new OpenAI({ model: "gpt-3.5-turbo", temperature: 0.1 });
-  const llm = new OpenAI();
+  const llm = new Anthropic();
   const message: ChatMessage = { content: query, role: "user" };
 
-  // var accumulated_result: string = "";
-  // var total_tokens: number = 0;
-
   //TODO: Add callbacks later
 
   //Stream Complete
@@ -22,7 +19,10 @@ Where is Istanbul?
   //either an AsyncGenerator or a Response.
   // Omitting the streaming flag automatically sets streaming to false
-  const chatEngine: SimpleChatEngine = new SimpleChatEngine({chatHistory: undefined, llm: llm});
+  const chatEngine: SimpleChatEngine = new SimpleChatEngine({
+    chatHistory: undefined,
+    llm: llm,
+  });
 
   const rl = readline.createInterface({ input, output });
 
   while (true) {
diff --git a/packages/core/src/callbacks/CallbackManager.ts b/packages/core/src/callbacks/CallbackManager.ts
index fb7eff8c9e765439f43e0b34fac324ddbb620324..266058261ecaa2024cf55ecc792ee5ccce606c33 100644
--- a/packages/core/src/callbacks/CallbackManager.ts
+++ b/packages/core/src/callbacks/CallbackManager.ts
@@ -39,13 +39,12 @@ export interface DefaultStreamToken {
 //OpenAI stream token schema is the default.
 //Note: Anthropic and Replicate also use similar token schemas.
 export type OpenAIStreamToken = DefaultStreamToken;
-export type AnthropicStreamToken =
-{
-  completion: string,
-  stop_reason: string | undefined,
-  model: string,
-  stop: boolean | undefined,
-  log_id: string
+export type AnthropicStreamToken = {
+  completion: string;
+  model: string;
+  stop_reason: string | undefined;
+  stop?: boolean | undefined;
+  log_id?: string;
 };
 
 //
diff --git a/packages/core/src/llm/LLM.ts b/packages/core/src/llm/LLM.ts
index 6f6c8c4e5d37b34b9ef41bab8b4d90d5c1956817..dd17eb49d614182982923147962193e62a404d3a 100644
--- a/packages/core/src/llm/LLM.ts
+++ b/packages/core/src/llm/LLM.ts
@@ -663,10 +663,14 @@ export class Anthropic implements LLM {
     parentEvent?: Event | undefined,
     streaming?: T,
   ): Promise<R> {
-    if(streaming){
+    if (streaming) {
      return this.streamComplete(prompt, parentEvent) as R;
     }
-    return this.chat([{ content: prompt, role: "user" }], parentEvent, streaming) as R;
+    return this.chat(
+      [{ content: prompt, role: "user" }],
+      parentEvent,
+      streaming,
+    ) as R;
   }
 
   protected streamComplete(
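
Usage note (illustrative, not part of the patch): the sketch below shows how the patched apps/simple/llmStream.ts exercises the new Anthropic path end to end. It assumes SimpleChatEngine.chat(message, chatHistory?, streaming?) resolves to an AsyncGenerator of string chunks when streaming is true, per the AsyncGenerator-or-Response comment in the diff; the loop body stands in for the elided while (true) block.

    import { SimpleChatEngine } from "llamaindex";
    import { Anthropic } from "../../packages/core/src/llm/LLM";

    async function main() {
      const llm = new Anthropic();
      const chatEngine = new SimpleChatEngine({ chatHistory: undefined, llm });

      // streaming: true selects the AsyncGenerator branch noted in the diff
      const stream = await chatEngine.chat("Where is Istanbul?", undefined, true);
      for await (const chunk of stream) {
        process.stdout.write(chunk); // print tokens as they arrive
      }
    }

    main().catch(console.error);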
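
Type note (illustrative): with stop and log_id now optional in AnthropicStreamToken, a mid-stream token type-checks without them, while a final token may still carry both. The token literals below are hypothetical, as are the model id and the deep import path.

    import { AnthropicStreamToken } from "../../packages/core/src/callbacks/CallbackManager";

    // Mid-stream token: only the three required fields.
    const midStream: AnthropicStreamToken = {
      completion: "Istanbul is",
      model: "claude-2", // hypothetical model id
      stop_reason: undefined, // no stop reason while tokens are still flowing
    };

    // Final token: the optional fields appear once generation stops.
    const finalToken: AnthropicStreamToken = {
      completion: " in Turkey.",
      model: "claude-2",
      stop_reason: "stop_sequence",
      stop: true,
      log_id: "example-log-id", // hypothetical
    };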
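
Call-shape note (illustrative): the reformatted Anthropic.complete() keeps its conditional behavior: a truthy streaming flag delegates to streamComplete(), otherwise the prompt is wrapped as a single user message and forwarded to chat(). A sketch of both call shapes, assuming the streaming branch yields string tokens and this runs inside an async function:

    const llm = new Anthropic();

    // streaming omitted: defaults to false and resolves via chat()
    const full = await llm.complete("Where is Istanbul?");
    console.log(full);

    // streaming: true resolves to the generator from streamComplete()
    const tokens = await llm.complete("Where is Istanbul?", undefined, true);
    for await (const token of tokens) {
      process.stdout.write(token);
    }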