diff --git a/examples/chatHistory.ts b/examples/chatHistory.ts
new file mode 100644
index 0000000000000000000000000000000000000000..7ed93471da26c9de0ec5ed4c9db6cec15dd30875
--- /dev/null
+++ b/examples/chatHistory.ts
@@ -0,0 +1,32 @@
+import { stdin as input, stdout as output } from "node:process";
+import readline from "node:readline/promises";
+
+import { OpenAI, SimpleChatEngine, SummaryChatHistory } from "llamaindex";
+
+async function main() {
+  // Set maxTokens to 75% of the context window size of 4096
+  // This will trigger the summarizer once the chat history reaches 25% of the context window size (1024 tokens)
+  const llm = new OpenAI({ model: "gpt-3.5-turbo", maxTokens: 4096 * 0.75 });
+  const chatHistory = new SummaryChatHistory({ llm });
+  const chatEngine = new SimpleChatEngine({ llm });
+  const rl = readline.createInterface({ input, output });
+
+  while (true) {
+    const query = await rl.question("Query: ");
+    const stream = await chatEngine.chat({
+      message: query,
+      chatHistory,
+      stream: true,
+    });
+    if (chatHistory.getLastSummary()) {
+      // Print the summary of the conversation so far that is produced by the SummaryChatHistory
+      console.log(`Summary: ${chatHistory.getLastSummary()?.content}`);
+    }
+    for await (const chunk of stream) {
+      process.stdout.write(chunk.response);
+    }
+    console.log();
+  }
+}
+
+main().catch(console.error);
diff --git a/packages/core/package.json b/packages/core/package.json
index cd99db4439019a3ea8c8fe273f2510d91e5b9782..66a9ec896492ea4b87feec78bdb593208b90e87a
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -58,11 +58,6 @@
       "import": "./dist/storage/FileSystem.mjs",
       "require": "./dist/storage/FileSystem.js"
     },
-    "./ChatEngine": {
-      "types": "./dist/ChatEngine.d.mts",
-      "import": "./dist/ChatEngine.mjs",
-      "require": "./dist/ChatEngine.js"
-    },
     "./ChatHistory": {
       "types": "./dist/ChatHistory.d.mts",
       "import": "./dist/ChatHistory.mjs",
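
A minimal sketch of the token-budget arithmetic behind the comments in examples/chatHistory.ts, assuming (as those comments suggest) that SummaryChatHistory starts summarizing once the unsummarized history exceeds contextWindow - maxTokens tokens; the variable names here are illustrative, not part of the library's API:

// Assumption: SummaryChatHistory summarizes when the pending history
// exceeds contextWindow - maxTokens (per the example's own comments).
const contextWindow = 4096;                              // gpt-3.5-turbo context window used above
const maxTokens = contextWindow * 0.75;                  // 3072 tokens reserved for completions
const summarizeThreshold = contextWindow - maxTokens;    // 1024 tokens (25%) of history before summarizing
console.log({ maxTokens, summarizeThreshold });          // { maxTokens: 3072, summarizeThreshold: 1024 }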