Skip to content
Snippets Groups Projects
Unverified Commit cce3b792 authored by Alex Yang's avatar Alex Yang Committed by GitHub
Browse files

revert: missing files (#421)

parent bff40f27
Branches
Tags
No related merge requests found
import { stdin as input, stdout as output } from "node:process";
import readline from "node:readline/promises";
import { OpenAI, SimpleChatEngine, SummaryChatHistory } from "llamaindex";
/**
 * Interactive chat REPL demonstrating SummaryChatHistory.
 *
 * Reserves 75% of the 4096-token context window for model output, so the
 * SummaryChatHistory condenses the conversation once the accumulated
 * history exceeds the remaining 25% of the window (1024 tokens).
 */
async function main() {
  const llm = new OpenAI({ model: "gpt-3.5-turbo", maxTokens: 4096 * 0.75 });
  const chatHistory = new SummaryChatHistory({ llm });
  const chatEngine = new SimpleChatEngine({ llm });
  const rl = readline.createInterface({ input, output });

  // Prompt loop: read a query, stream the reply, repeat until interrupted.
  for (;;) {
    const query = await rl.question("Query: ");
    const stream = await chatEngine.chat({
      message: query,
      chatHistory,
      stream: true,
    });

    // If the history has been condensed, show the latest rolling summary.
    const summary = chatHistory.getLastSummary();
    if (summary) {
      console.log(`Summary: ${summary.content}`);
    }

    // Write streamed response chunks as they arrive, then end the line.
    for await (const chunk of stream) {
      process.stdout.write(chunk.response);
    }
    console.log();
  }
}

main().catch(console.error);
...@@ -58,11 +58,6 @@ ...@@ -58,11 +58,6 @@
"import": "./dist/storage/FileSystem.mjs", "import": "./dist/storage/FileSystem.mjs",
"require": "./dist/storage/FileSystem.js" "require": "./dist/storage/FileSystem.js"
}, },
"./ChatEngine": {
"types": "./dist/ChatEngine.d.mts",
"import": "./dist/ChatEngine.mjs",
"require": "./dist/ChatEngine.js"
},
"./ChatHistory": { "./ChatHistory": {
"types": "./dist/ChatHistory.d.mts", "types": "./dist/ChatHistory.d.mts",
"import": "./dist/ChatHistory.mjs", "import": "./dist/ChatHistory.mjs",
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment