Skip to content
Snippets Groups Projects
Unverified Commit daf8522b authored by Thuc Pham's avatar Thuc Pham Committed by GitHub
Browse files

feat: use mock llm (#1492)

parent 223f3136
Branches
Tags
No related merge requests found
......@@ -6,6 +6,7 @@ const withMDX = createMDX();
const config = {
reactStrictMode: true,
transpilePackages: ["monaco-editor"],
serverExternalPackages: ["@huggingface/transformers"],
webpack: (config, { isServer }) => {
if (Array.isArray(config.target) && config.target.includes("web")) {
config.target = ["web", "es2020"];
......@@ -26,6 +27,7 @@ const config = {
}),
);
}
config.resolve.alias["replicate"] = false;
return config;
},
};
......
import { Message } from "ai";
import { simulateReadableStream } from "ai/test";
import { NextRequest, NextResponse } from "next/server";
import { llm } from "@/lib/utils";
import { LlamaIndexAdapter, type Message } from "ai";
import { Settings, SimpleChatEngine, type ChatMessage } from "llamaindex";
import { NextResponse, type NextRequest } from "next/server";
Settings.llm = llm;
export async function POST(request: NextRequest) {
try {
......@@ -12,19 +15,16 @@ export async function POST(request: NextRequest) {
{ status: 400 },
);
}
const mockResponse = `Hello! This is a mock response to: ${userMessage.content}`;
return new Response(
simulateReadableStream({
chunkDelayInMs: 20,
values: mockResponse.split(" ").map((t) => `0:"${t} "\n`),
}).pipeThrough(new TextEncoderStream()),
{
status: 200,
headers: {
"X-Vercel-AI-Data-Stream": "v1",
"Content-Type": "text/plain; charset=utf-8",
},
},
const chatEngine = new SimpleChatEngine();
return LlamaIndexAdapter.toDataStreamResponse(
await chatEngine.chat({
message: userMessage.content,
chatHistory: messages as ChatMessage[],
stream: true,
}),
{},
);
} catch (error) {
const detail = (error as Error).message;
......
import { llm } from "@/lib/utils";
import { Markdown } from "@llamaindex/chat-ui/widgets";
import { generateId, Message, parseStreamPart } from "ai";
import { generateId, Message } from "ai";
import { createAI, createStreamableUI, getMutableAIState } from "ai/rsc";
import { simulateReadableStream } from "ai/test";
import { type ChatMessage, Settings, SimpleChatEngine } from "llamaindex";
import { ReactNode } from "react";
type ServerState = Message[];
......@@ -10,6 +11,8 @@ type Actions = {
chat: (message: Message) => Promise<Message & { display: ReactNode }>;
};
Settings.llm = llm;
export const AI = createAI<ServerState, FrontendState, Actions>({
initialAIState: [],
initialUIState: [],
......@@ -20,31 +23,30 @@ export const AI = createAI<ServerState, FrontendState, Actions>({
const aiState = getMutableAIState<typeof AI>();
aiState.update((prev) => [...prev, message]);
const mockResponse = `Hello! This is a mock response to: ${message.content}`;
const responseStream = simulateReadableStream({
chunkDelayInMs: 20,
values: mockResponse.split(" ").map((t) => `0:"${t} "\n`),
});
const uiStream = createStreamableUI();
const chatEngine = new SimpleChatEngine();
const assistantMessage: Message = {
id: generateId(),
role: "assistant",
content: "",
};
responseStream.pipeTo(
new WritableStream({
write: async (message) => {
assistantMessage.content += parseStreamPart(message).value;
uiStream.update(<Markdown content={assistantMessage.content} />);
},
close: () => {
aiState.done([...aiState.get(), assistantMessage]);
uiStream.done();
},
}),
);
// run the async function without blocking
(async () => {
const chatResponse = await chatEngine.chat({
stream: true,
message: message.content,
chatHistory: aiState.get() as ChatMessage[],
});
for await (const chunk of chatResponse) {
assistantMessage.content += chunk.delta;
uiStream.update(<Markdown content={assistantMessage.content} />);
}
aiState.done([...aiState.get(), assistantMessage]);
uiStream.done();
})();
return {
...assistantMessage,
......
import { clsx, type ClassValue } from "clsx"
import { twMerge } from "tailwind-merge"
import { clsx, type ClassValue } from "clsx";
import { LLM, LLMMetadata } from "llamaindex";
import { twMerge } from "tailwind-merge";
/**
 * Merge class-name inputs into a single Tailwind-safe class string.
 *
 * Combines the inputs with `clsx` (which flattens arrays/objects and
 * drops falsy values), then runs the result through `twMerge` so that
 * conflicting Tailwind utility classes are de-duplicated with the
 * last one winning.
 *
 * @param inputs - Any values accepted by `clsx` (strings, arrays, objects).
 * @returns The merged class string.
 */
export function cn(...inputs: ClassValue[]): string {
  return twMerge(clsx(inputs));
}
/**
 * Stand-in LLM implementation so the demo runs without contacting a
 * real model provider.
 *
 * `chat()` ignores its input entirely and resolves to a ReadableStream
 * that emits the fixed reply one character at a time, pausing 20 ms
 * between characters to imitate token streaming.
 */
class MockLLM {
  // Static metadata; values are placeholders — no real model backs them.
  metadata: LLMMetadata = {
    model: "MockLLM",
    temperature: 0.5,
    topP: 0.5,
    contextWindow: 1024,
    tokenizer: undefined,
  };

  /** Resolves to a stream of `{ delta }` chunks spelling the canned reply. */
  async chat() {
    const text = "Hello! This is a mock response";
    return new ReadableStream({
      async start(controller) {
        for (const delta of text) {
          controller.enqueue({ delta });
          // Small pause per character to simulate streamed generation.
          await new Promise((resolve) => setTimeout(resolve, 20));
        }
        controller.close();
      },
    });
  }
}

// NOTE(review): MockLLM only implements the subset of the LLM surface the
// examples touch, hence the deliberate double cast — confirm callers only
// ever invoke `chat()` on it.
export const llm = new MockLLM() as unknown as LLM;
\ No newline at end of file
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment