diff --git a/.changeset/shaggy-houses-breathe.md b/.changeset/shaggy-houses-breathe.md
new file mode 100644
index 0000000000000000000000000000000000000000..94ffcc6b29370bc87e596b761e0f754e508327d0
--- /dev/null
+++ b/.changeset/shaggy-houses-breathe.md
@@ -0,0 +1,6 @@
+---
+"@llamaindex/core": patch
+"llamaindex": patch
+---
+
+refactor(core): move `ContextChatEngine` and `SimpleChatEngine`
diff --git a/apps/next/src/actions/index.tsx b/apps/next/src/actions/index.tsx
index 17f7d035ca978fe175dddb427fe23e837044080f..b215c44e4d96b327cd09920738dc5df7f7283d6c 100644
--- a/apps/next/src/actions/index.tsx
+++ b/apps/next/src/actions/index.tsx
@@ -2,9 +2,9 @@ import { ClientMDXContent } from "@/components/mdx";
 import { BotMessage } from "@/components/message";
 import { Skeleton } from "@/components/ui/skeleton";
 import { LlamaCloudRetriever } from "@/deps/cloud";
+import { ContextChatEngine } from "@llamaindex/core/chat-engine";
 import { Settings } from "@llamaindex/core/global";
 import { ChatMessage } from "@llamaindex/core/llms";
-import { RetrieverQueryEngine } from "@llamaindex/core/query-engine";
 import { OpenAI } from "@llamaindex/openai";
 import { createAI, createStreamableUI, getMutableAIState } from "ai/rsc";
 import { ReactNode } from "react";
@@ -50,7 +50,7 @@ export const AIProvider = createAI({
   actions: {
     query: async (message: string): Promise<UIMessage> => {
       "use server";
-      const queryEngine = new RetrieverQueryEngine(retriever);
+      const chatEngine = new ContextChatEngine({ retriever });
       const id = Date.now();
       const aiState = getMutableAIState<typeof AIProvider>();
 
@@ -73,10 +73,12 @@ export const AIProvider = createAI({
       );
 
       runAsyncFnWithoutBlocking(async () => {
-        const response = await queryEngine.query({
-          query: message,
+        const response = await chatEngine.chat({
+          message,
+          chatHistory: aiState.get().messages,
           stream: true,
         });
+
         let content = "";
 
         for await (const { delta } of response) {
diff --git a/packages/core/src/agent/base.ts b/packages/core/src/agent/base.ts
index 64fd92bd4e832ad04651dda7123f339ee06421e2..19fab33fc683ff2852cf7c3cc8efda9373b4ea71 100644
--- a/packages/core/src/agent/base.ts
+++ b/packages/core/src/agent/base.ts
@@ -3,7 +3,7 @@ import {
   BaseChatEngine,
   type NonStreamingChatEngineParams,
   type StreamingChatEngineParams,
-} from "../chat-engine";
+} from "../chat-engine/base";
 import { wrapEventCaller } from "../decorator";
 import { Settings } from "../global";
 import type {
diff --git a/packages/core/src/chat-engine/base.ts b/packages/core/src/chat-engine/base.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b4bd4cf3b1a4cba3ddccd7f33417222bcce15d7b
--- /dev/null
+++ b/packages/core/src/chat-engine/base.ts
@@ -0,0 +1,36 @@
+import type { ChatMessage, MessageContent } from "../llms";
+import type { BaseMemory } from "../memory";
+import { EngineResponse } from "../schema";
+
+export interface BaseChatEngineParams<
+  AdditionalMessageOptions extends object = object,
+> {
+  message: MessageContent;
+  /**
+   * Optional chat history if you want to customize the chat history.
+   */
+  chatHistory?:
+    | ChatMessage<AdditionalMessageOptions>[]
+    | BaseMemory<AdditionalMessageOptions>;
+}
+
+export interface StreamingChatEngineParams<
+  AdditionalMessageOptions extends object = object,
+> extends BaseChatEngineParams<AdditionalMessageOptions> {
+  stream: true;
+}
+
+export interface NonStreamingChatEngineParams<
+  AdditionalMessageOptions extends object = object,
+> extends BaseChatEngineParams<AdditionalMessageOptions> {
+  stream?: false;
+}
+
+export abstract class BaseChatEngine {
+  abstract chat(params: NonStreamingChatEngineParams): Promise<EngineResponse>;
+  abstract chat(
+    params: StreamingChatEngineParams,
+  ): Promise<AsyncIterable<EngineResponse>>;
+
+  abstract chatHistory: ChatMessage[] | Promise<ChatMessage[]>;
+}
diff --git a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts b/packages/core/src/chat-engine/context-chat-engine.ts
similarity index 84%
rename from packages/llamaindex/src/engines/chat/ContextChatEngine.ts
rename to packages/core/src/chat-engine/context-chat-engine.ts
index e7b9a9ce26699e1552c694af63ad0d08b0b2a3ab..fb2a50f651e1165dfb5d98cd33b36a597780085e 100644
--- a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts
+++ b/packages/core/src/chat-engine/context-chat-engine.ts
@@ -1,33 +1,24 @@
-import type {
-  BaseChatEngine,
-  NonStreamingChatEngineParams,
-  StreamingChatEngineParams,
-} from "@llamaindex/core/chat-engine";
-import { wrapEventCaller } from "@llamaindex/core/decorator";
-import type {
-  ChatMessage,
-  LLM,
-  MessageContent,
-  MessageType,
-} from "@llamaindex/core/llms";
-import { BaseMemory, ChatMemoryBuffer } from "@llamaindex/core/memory";
-import type { BaseNodePostprocessor } from "@llamaindex/core/postprocessor";
+import { wrapEventCaller } from "../decorator";
+import { Settings } from "../global";
+import type { ChatMessage, LLM, MessageContent, MessageType } from "../llms";
+import { BaseMemory, ChatMemoryBuffer } from "../memory";
+import type { BaseNodePostprocessor } from "../postprocessor";
 import {
   type ContextSystemPrompt,
   type ModuleRecord,
   PromptMixin,
   type PromptsRecord,
-} from "@llamaindex/core/prompts";
-import type { BaseRetriever } from "@llamaindex/core/retriever";
-import { EngineResponse, MetadataMode } from "@llamaindex/core/schema";
-import {
-  extractText,
-  streamConverter,
-  streamReducer,
-} from "@llamaindex/core/utils";
-import { Settings } from "../../Settings.js";
-import { DefaultContextGenerator } from "./DefaultContextGenerator.js";
-import type { ContextGenerator } from "./types.js";
+} from "../prompts";
+import type { BaseRetriever } from "../retriever";
+import { EngineResponse, MetadataMode } from "../schema";
+import { extractText, streamConverter, streamReducer } from "../utils";
+import type {
+  BaseChatEngine,
+  NonStreamingChatEngineParams,
+  StreamingChatEngineParams,
+} from "./base";
+import { DefaultContextGenerator } from "./default-context-generator";
+import type { ContextGenerator } from "./type";
 
 /**
  * ContextChatEngine uses the Index to get the appropriate context for each query.
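The new `BaseChatEngine` contract above discriminates its return type on the `stream` literal: `stream: true` selects the `AsyncIterable<EngineResponse>` overload, while omitting it (or passing `false`) resolves to a single `EngineResponse`. A caller-side sketch of how the overloads narrow, using only members visible in this diff (the `run` helper is illustrative, not part of the change):

```ts
import type { BaseChatEngine } from "@llamaindex/core/chat-engine";

// The `stream` literal picks the return type at the call site.
async function run(engine: BaseChatEngine) {
  // Non-streaming overload: one EngineResponse.
  const whole = await engine.chat({ message: "hi" });
  console.log(whole.message.content);

  // Streaming overload: an async iterable of deltas.
  const parts = await engine.chat({ message: "hi", stream: true });
  for await (const chunk of parts) {
    process.stdout.write(chunk.delta ?? "");
  }
}
```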
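And a minimal sketch of driving the moved `ContextChatEngine` end to end, mirroring the `apps/next` update above; the `answer` helper and its parameters are illustrative assumptions, not part of the diff:

```ts
import { ContextChatEngine } from "@llamaindex/core/chat-engine";
import type { ChatMessage } from "@llamaindex/core/llms";
import type { BaseRetriever } from "@llamaindex/core/retriever";

// `retriever` is any BaseRetriever implementation; wiring one up is outside this diff.
async function answer(
  retriever: BaseRetriever,
  message: string,
  chatHistory: ChatMessage[],
) {
  const chatEngine = new ContextChatEngine({ retriever });

  // Retrieval happens per message; prior turns ride along as chatHistory.
  const stream = await chatEngine.chat({ message, chatHistory, stream: true });

  let content = "";
  for await (const { delta } of stream) {
    content += delta ?? "";
  }
  return content;
}
```

This is also the behavioral difference from the replaced `RetrieverQueryEngine`: `chat` threads `chatHistory` into the prompt, which is why the `apps/next` action now passes `aiState.get().messages`.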
diff --git a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts b/packages/core/src/chat-engine/default-context-generator.ts
similarity index 83%
rename from packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
rename to packages/core/src/chat-engine/default-context-generator.ts
index 55cd3403c8a150b92d318eb06b25c28f11917e0f..9a0e596d5fab3e9b117f2056e580253500a18507 100644
--- a/packages/llamaindex/src/engines/chat/DefaultContextGenerator.ts
+++ b/packages/core/src/chat-engine/default-context-generator.ts
@@ -1,15 +1,15 @@
-import type { MessageContent, MessageType } from "@llamaindex/core/llms";
-import type { BaseNodePostprocessor } from "@llamaindex/core/postprocessor";
+import type { MessageContent, MessageType } from "../llms";
+import type { BaseNodePostprocessor } from "../postprocessor";
 import {
   type ContextSystemPrompt,
   defaultContextSystemPrompt,
   type ModuleRecord,
   PromptMixin,
-} from "@llamaindex/core/prompts";
-import { createMessageContent } from "@llamaindex/core/response-synthesizers";
-import type { BaseRetriever } from "@llamaindex/core/retriever";
-import { MetadataMode, type NodeWithScore } from "@llamaindex/core/schema";
-import type { Context, ContextGenerator } from "./types.js";
+} from "../prompts";
+import { createMessageContent } from "../response-synthesizers";
+import type { BaseRetriever } from "../retriever";
+import { MetadataMode, type NodeWithScore } from "../schema";
+import type { Context, ContextGenerator } from "./type.js";
 
 export class DefaultContextGenerator
   extends PromptMixin
diff --git a/packages/core/src/chat-engine/index.ts b/packages/core/src/chat-engine/index.ts
index b4bd4cf3b1a4cba3ddccd7f33417222bcce15d7b..f5af4dd4d8e99ca35c3318ac0e89befa47309f40 100644
--- a/packages/core/src/chat-engine/index.ts
+++ b/packages/core/src/chat-engine/index.ts
@@ -1,36 +1,9 @@
-import type { ChatMessage, MessageContent } from "../llms";
-import type { BaseMemory } from "../memory";
-import { EngineResponse } from "../schema";
-
-export interface BaseChatEngineParams<
-  AdditionalMessageOptions extends object = object,
-> {
-  message: MessageContent;
-  /**
-   * Optional chat history if you want to customize the chat history.
-   */
-  chatHistory?:
-    | ChatMessage<AdditionalMessageOptions>[]
-    | BaseMemory<AdditionalMessageOptions>;
-}
-
-export interface StreamingChatEngineParams<
-  AdditionalMessageOptions extends object = object,
-> extends BaseChatEngineParams<AdditionalMessageOptions> {
-  stream: true;
-}
-
-export interface NonStreamingChatEngineParams<
-  AdditionalMessageOptions extends object = object,
-> extends BaseChatEngineParams<AdditionalMessageOptions> {
-  stream?: false;
-}
-
-export abstract class BaseChatEngine {
-  abstract chat(params: NonStreamingChatEngineParams): Promise<EngineResponse>;
-  abstract chat(
-    params: StreamingChatEngineParams,
-  ): Promise<AsyncIterable<EngineResponse>>;
-
-  abstract chatHistory: ChatMessage[] | Promise<ChatMessage[]>;
-}
+export {
+  BaseChatEngine,
+  type BaseChatEngineParams,
+  type NonStreamingChatEngineParams,
+  type StreamingChatEngineParams,
+} from "./base";
+export { ContextChatEngine } from "./context-chat-engine";
+export { DefaultContextGenerator } from "./default-context-generator";
+export { SimpleChatEngine } from "./simple-chat-engine";
diff --git a/packages/llamaindex/src/engines/chat/SimpleChatEngine.ts b/packages/core/src/chat-engine/simple-chat-engine.ts
similarity index 83%
rename from packages/llamaindex/src/engines/chat/SimpleChatEngine.ts
rename to packages/core/src/chat-engine/simple-chat-engine.ts
index 6aa2c8c42bb36a9956fe78daf0c09ec75a5c932f..568d2f584e2e4138e3824174976ef6114531da32 100644
--- a/packages/llamaindex/src/engines/chat/SimpleChatEngine.ts
+++ b/packages/core/src/chat-engine/simple-chat-engine.ts
@@ -1,15 +1,15 @@
+import type { LLM } from "../llms";
+import { BaseMemory, ChatMemoryBuffer } from "../memory";
+import { EngineResponse } from "../schema";
+import { streamConverter, streamReducer } from "../utils";
 import type {
   BaseChatEngine,
   NonStreamingChatEngineParams,
   StreamingChatEngineParams,
-} from "@llamaindex/core/chat-engine";
-import type { LLM } from "@llamaindex/core/llms";
-import { BaseMemory, ChatMemoryBuffer } from "@llamaindex/core/memory";
-import { EngineResponse } from "@llamaindex/core/schema";
-import { streamConverter, streamReducer } from "@llamaindex/core/utils";
+} from "./base";
 
-import { wrapEventCaller } from "@llamaindex/core/decorator";
-import { Settings } from "../../Settings.js";
+import { wrapEventCaller } from "../decorator";
+import { Settings } from "../global";
 
 /**
  * SimpleChatEngine is the simplest possible chat engine. Useful for using your own custom prompts.
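For completeness, a usage sketch of the moved `SimpleChatEngine`, which now reads its default LLM from `@llamaindex/core/global`'s `Settings` rather than the old `../../Settings.js`; the OpenAI model choice here is an assumption for illustration:

```ts
import { SimpleChatEngine } from "@llamaindex/core/chat-engine";
import { Settings } from "@llamaindex/core/global";
import { OpenAI } from "@llamaindex/openai";

async function main() {
  // SimpleChatEngine does no retrieval; when no llm is passed to the
  // constructor it falls back to Settings.llm.
  Settings.llm = new OpenAI({ model: "gpt-4o-mini" });

  const engine = new SimpleChatEngine();

  // Non-streaming overload resolves to a single EngineResponse.
  const response = await engine.chat({ message: "Hello!" });
  console.log(response.message.content);
}

main().catch(console.error);
```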
diff --git a/packages/llamaindex/src/engines/chat/types.ts b/packages/core/src/chat-engine/type.ts
similarity index 68%
rename from packages/llamaindex/src/engines/chat/types.ts
rename to packages/core/src/chat-engine/type.ts
index 2c5a811d1cfb619fb9a54b58223858bf4f2d4e8e..693b724e7eae342610b3a0549d3d7fd295833253 100644
--- a/packages/llamaindex/src/engines/chat/types.ts
+++ b/packages/core/src/chat-engine/type.ts
@@ -1,10 +1,11 @@
-import type { ChatMessage } from "@llamaindex/core/llms";
-import type { NodeWithScore } from "@llamaindex/core/schema";
+import type { ChatMessage } from "../llms";
+import type { NodeWithScore } from "../schema";
 
 export interface Context {
   message: ChatMessage;
   nodes: NodeWithScore[];
 }
+
 /**
  * A ContextGenerator is used to generate a context based on a message's text content
 */
diff --git a/packages/llamaindex/src/engines/chat/index.ts b/packages/llamaindex/src/engines/chat/index.ts
index c66c20fbf7ddd46ac904a887dc186effc03eb790..1411cbac27590909438decbc8669445e43f1f408 100644
--- a/packages/llamaindex/src/engines/chat/index.ts
+++ b/packages/llamaindex/src/engines/chat/index.ts
@@ -1,4 +1,2 @@
+export * from "@llamaindex/core/chat-engine";
 export { CondenseQuestionChatEngine } from "./CondenseQuestionChatEngine.js";
-export { ContextChatEngine } from "./ContextChatEngine.js";
-export { SimpleChatEngine } from "./SimpleChatEngine.js";
-export * from "./types.js";
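The final hunk is the compatibility shim: `packages/llamaindex` now re-exports everything from `@llamaindex/core/chat-engine`, so existing consumer imports should keep resolving. A sketch of the two equivalent import paths, assuming the `llamaindex` package root continues to re-export this chat barrel as before:

```ts
// New canonical location in @llamaindex/core:
import {
  ContextChatEngine,
  SimpleChatEngine,
} from "@llamaindex/core/chat-engine";
// Pre-existing imports through the llamaindex barrel keep working
// via the `export *` added above:
import {
  ContextChatEngine as LegacyContextChatEngine,
  SimpleChatEngine as LegacySimpleChatEngine,
} from "llamaindex";

// Both bindings should point at the same class objects after the re-export.
console.log(ContextChatEngine === LegacyContextChatEngine); // expected: true
console.log(SimpleChatEngine === LegacySimpleChatEngine); // expected: true
```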