From 6cf6ae631cfde69eb9ab03a7afe21a64344a1f54 Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Thu, 18 Jul 2024 13:27:02 -0700
Subject: [PATCH] feat: abstract query type (#1052)

---
 .changeset/strange-pumas-whisper.md           |   6 +
 examples/lowlevel.ts                          |  12 +-
 packages/core/package.json                    |  14 ++
 packages/core/src/query-engine/base.ts        |  29 ++++
 packages/core/src/query-engine/index.ts       |   1 +
 packages/core/src/schema/index.ts             |   1 +
 .../core/src/schema/type/engine-response.ts   |  24 ++--
 packages/core/src/utils/llms.ts               |  11 +-
 packages/llamaindex/src/QuestionGenerator.ts  |   9 +-
 packages/llamaindex/src/agent/base.ts         |   2 +-
 .../chat/CondenseQuestionChatEngine.ts        |  10 +-
 .../src/engines/chat/ContextChatEngine.ts     |   2 +-
 .../src/engines/chat/SimpleChatEngine.ts      |   2 +-
 packages/llamaindex/src/engines/chat/types.ts |   3 +-
 .../src/engines/query/RetrieverQueryEngine.ts |  15 ++-
 .../src/engines/query/RouterQueryEngine.ts    |  24 ++--
 .../engines/query/SubQuestionQueryEngine.ts   |  51 +++----
 .../llamaindex/src/engines/query/types.ts     |   3 +-
 .../llamaindex/src/evaluation/Correctness.ts  |   4 +-
 .../llamaindex/src/evaluation/Faithfulness.ts |   3 +-
 .../llamaindex/src/evaluation/Relevancy.ts    |   3 +-
 packages/llamaindex/src/evaluation/types.ts   |   9 +-
 packages/llamaindex/src/index.edge.ts         |   1 -
 packages/llamaindex/src/internal/utils.ts     |   8 --
 packages/llamaindex/src/selectors/base.ts     |  11 +-
 .../llamaindex/src/selectors/llmSelectors.ts  |  12 +-
 .../MultiModalResponseSynthesizer.ts          |  30 ++---
 .../src/synthesizers/ResponseSynthesizer.ts   |  46 ++++---
 .../llamaindex/src/synthesizers/builders.ts   | 126 ++++++++----------
 packages/llamaindex/src/synthesizers/types.ts |  42 ++----
 .../llamaindex/src/tools/QueryEngineTool.ts   |   8 +-
 packages/llamaindex/src/types.ts              |  15 +--
 32 files changed, 275 insertions(+), 262 deletions(-)
 create mode 100644 .changeset/strange-pumas-whisper.md
 create mode 100644 packages/core/src/query-engine/base.ts
 create mode 100644 packages/core/src/query-engine/index.ts
 rename packages/llamaindex/src/EngineResponse.ts => packages/core/src/schema/type/engine-response.ts (80%)

diff --git a/.changeset/strange-pumas-whisper.md b/.changeset/strange-pumas-whisper.md
new file mode 100644
index 000000000..8bd1aeac1
--- /dev/null
+++ b/.changeset/strange-pumas-whisper.md
@@ -0,0 +1,6 @@
+---
+"@llamaindex/core": patch
+"llamaindex": patch
+---
+
+feat: abstract query type
diff --git a/examples/lowlevel.ts b/examples/lowlevel.ts
index 85acee9b6..5db9b71dc 100644
--- a/examples/lowlevel.ts
+++ b/examples/lowlevel.ts
@@ -27,11 +27,13 @@ import {
     },
   ];
 
-  const stream = await responseSynthesizer.synthesize({
-    query: "What age am I?",
-    nodesWithScore,
-    stream: true,
-  });
+  const stream = await responseSynthesizer.synthesize(
+    {
+      query: "What age am I?",
+      nodesWithScore,
+    },
+    true,
+  );
   for await (const chunk of stream) {
     process.stdout.write(chunk.response);
   }
diff --git a/packages/core/package.json b/packages/core/package.json
index 01795d511..c26f3ada3 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -4,6 +4,20 @@
   "version": "0.1.2",
   "description": "LlamaIndex Core Module",
   "exports": {
+    "./query-engine": {
+      "require": {
+        "types": "./dist/query-engine/index.d.cts",
+        "default": "./dist/query-engine/index.cjs"
+      },
+      "import": {
+        "types": "./dist/query-engine/index.d.ts",
+        "default": "./dist/query-engine/index.js"
+      },
+      "default": {
+        "types": "./dist/query-engine/index.d.ts",
+        "default": "./dist/query-engine/index.js"
+      }
+    },
     "./llms": {
       "require": {
"types": "./dist/llms/index.d.cts", diff --git a/packages/core/src/query-engine/base.ts b/packages/core/src/query-engine/base.ts new file mode 100644 index 000000000..3871b8cb9 --- /dev/null +++ b/packages/core/src/query-engine/base.ts @@ -0,0 +1,29 @@ +import type { MessageContent } from "../llms"; +import { EngineResponse, type NodeWithScore } from "../schema"; + +/** + * @link https://docs.llamaindex.ai/en/stable/api_reference/schema/?h=querybundle#llama_index.core.schema.QueryBundle + * + * We don't have `image_path` here, because it is included in the `query` field. + */ +export type QueryBundle = { + query: MessageContent; + customEmbeddings?: string[]; + embeddings?: number[]; +}; + +export type QueryType = string | QueryBundle; + +export interface BaseQueryEngine { + query( + strOrQueryBundle: QueryType, + stream: true, + ): Promise<AsyncIterable<EngineResponse>>; + query(strOrQueryBundle: QueryType, stream?: false): Promise<EngineResponse>; + + synthesize?( + strOrQueryBundle: QueryType, + nodes: NodeWithScore[], + additionalSources?: Iterator<NodeWithScore>, + ): Promise<EngineResponse>; +} diff --git a/packages/core/src/query-engine/index.ts b/packages/core/src/query-engine/index.ts new file mode 100644 index 000000000..5bcfd9a10 --- /dev/null +++ b/packages/core/src/query-engine/index.ts @@ -0,0 +1 @@ +export type { BaseQueryEngine, QueryBundle, QueryType } from "./base"; diff --git a/packages/core/src/schema/index.ts b/packages/core/src/schema/index.ts index a5cad71e1..b1f891978 100644 --- a/packages/core/src/schema/index.ts +++ b/packages/core/src/schema/index.ts @@ -1,3 +1,4 @@ export * from "./node"; export type { TransformComponent } from "./type"; +export { EngineResponse } from "./type/engine–response"; export * from "./zod"; diff --git a/packages/llamaindex/src/EngineResponse.ts "b/packages/core/src/schema/type/engine\342\200\223response.ts" similarity index 80% rename from packages/llamaindex/src/EngineResponse.ts rename to "packages/core/src/schema/type/engine\342\200\223response.ts" index 132a63940..c2de9e514 100644 --- a/packages/llamaindex/src/EngineResponse.ts +++ "b/packages/core/src/schema/type/engine\342\200\223response.ts" @@ -1,20 +1,16 @@ -import type { - ChatMessage, - ChatResponse, - ChatResponseChunk, -} from "@llamaindex/core/llms"; -import type { NodeWithScore } from "@llamaindex/core/schema"; -import { extractText } from "@llamaindex/core/utils"; +import type { ChatMessage, ChatResponse, ChatResponseChunk } from "../../llms"; +import { extractText } from "../../utils"; +import type { Metadata, NodeWithScore } from "../node"; export class EngineResponse implements ChatResponse, ChatResponseChunk { sourceNodes?: NodeWithScore[]; - metadata: Record<string, unknown> = {}; + metadata: Metadata = {}; message: ChatMessage; raw: object | null; - #stream: boolean; + readonly stream: boolean; private constructor( chatResponse: ChatResponse, @@ -24,7 +20,7 @@ export class EngineResponse implements ChatResponse, ChatResponseChunk { this.message = chatResponse.message; this.raw = chatResponse.raw; this.sourceNodes = sourceNodes; - this.#stream = stream; + this.stream = stream; } static fromResponse( @@ -70,13 +66,15 @@ export class EngineResponse implements ChatResponse, ChatResponseChunk { ); } - // @deprecated use 'message' instead + /** + * @deprecated Use `message` instead. 
+   */
   get response(): string {
     return extractText(this.message.content);
   }
 
   get delta(): string {
-    if (!this.#stream) {
+    if (!this.stream) {
       console.warn(
         "delta is only available for streaming responses. Consider using 'message' instead.",
       );
@@ -84,7 +82,7 @@
     return extractText(this.message.content);
   }
 
-  toString() {
+  toString(): string {
     return this.response ?? "";
   }
 }
diff --git a/packages/core/src/utils/llms.ts b/packages/core/src/utils/llms.ts
index 8a54cb7f5..d46bce7bd 100644
--- a/packages/core/src/utils/llms.ts
+++ b/packages/core/src/utils/llms.ts
@@ -3,15 +3,22 @@ import type {
   MessageContentDetail,
   MessageContentTextDetail,
 } from "../llms";
+import type { QueryType } from "../query-engine";
 import type { ImageType } from "../schema";
 
 /**
- * Extracts just the text from a multi-modal message or the message itself if it's just text.
+ * Extracts just the text, whether from
+ * a multi-modal message,
+ * a single text message,
+ * or a query.
  *
  * @param message The message to extract text from.
  * @returns The extracted text
 */
-export function extractText(message: MessageContent): string {
+export function extractText(message: MessageContent | QueryType): string {
+  if (typeof message === "object" && "query" in message) {
+    return extractText(message.query);
+  }
   if (typeof message !== "string" && !Array.isArray(message)) {
     console.warn(
       "extractText called with non-MessageContent message, this is likely a bug.",
diff --git a/packages/llamaindex/src/QuestionGenerator.ts b/packages/llamaindex/src/QuestionGenerator.ts
index e3a190d4e..e943a3861 100644
--- a/packages/llamaindex/src/QuestionGenerator.ts
+++ b/packages/llamaindex/src/QuestionGenerator.ts
@@ -1,4 +1,6 @@
 import type { LLM, ToolMetadata } from "@llamaindex/core/llms";
+import type { QueryType } from "@llamaindex/core/query-engine";
+import { extractText } from "@llamaindex/core/utils";
 import { SubQuestionOutputParser } from "./OutputParser.js";
 import type { SubQuestionPrompt } from "./Prompt.js";
 import { buildToolsText, defaultSubQuestionPrompt } from "./Prompt.js";
@@ -43,9 +45,12 @@ export class LLMQuestionGenerator
     }
   }
 
-  async generate(tools: ToolMetadata[], query: string): Promise<SubQuestion[]> {
+  async generate(
+    tools: ToolMetadata[],
+    query: QueryType,
+  ): Promise<SubQuestion[]> {
     const toolsStr = buildToolsText(tools);
-    const queryStr = query;
+    const queryStr = extractText(query);
     const prediction = (
       await this.llm.complete({
         prompt: this.prompt({
diff --git a/packages/llamaindex/src/agent/base.ts b/packages/llamaindex/src/agent/base.ts
index 7d55b3aac..27e8af5c1 100644
--- a/packages/llamaindex/src/agent/base.ts
+++ b/packages/llamaindex/src/agent/base.ts
@@ -5,10 +5,10 @@ import type {
   MessageContent,
   ToolOutput,
 } from "@llamaindex/core/llms";
+import { EngineResponse } from "@llamaindex/core/schema";
 import { wrapEventCaller } from "@llamaindex/core/utils";
 import { ReadableStream, TransformStream, randomUUID } from "@llamaindex/env";
 import { ChatHistory } from "../ChatHistory.js";
-import { EngineResponse } from "../EngineResponse.js";
 import { Settings } from "../Settings.js";
 import {
   type ChatEngine,
diff --git a/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts b/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts
index 099d90377..009a50a06 100644
--- a/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts
+++ b/packages/llamaindex/src/engines/chat/CondenseQuestionChatEngine.ts
@@ -1,4 +1,5 @@ import type { ChatMessage, LLM } from "@llamaindex/core/llms"; +import type { EngineResponse } from "@llamaindex/core/schema"; import { extractText, streamReducer, @@ -6,7 +7,6 @@ import { } from "@llamaindex/core/utils"; import type { ChatHistory } from "../../ChatHistory.js"; import { getHistory } from "../../ChatHistory.js"; -import type { EngineResponse } from "../../EngineResponse.js"; import type { CondenseQuestionPrompt } from "../../Prompt.js"; import { defaultCondenseQuestionPrompt, @@ -109,7 +109,8 @@ export class CondenseQuestionChatEngine return streamReducer({ stream, initialValue: "", - reducer: (accumulator, part) => (accumulator += part.response), + reducer: (accumulator, part) => + (accumulator += extractText(part.message.content)), finished: (accumulator) => { chatHistory.addMessage({ content: accumulator, role: "assistant" }); }, @@ -118,7 +119,10 @@ export class CondenseQuestionChatEngine const response = await this.queryEngine.query({ query: condensedQuestion, }); - chatHistory.addMessage({ content: response.response, role: "assistant" }); + chatHistory.addMessage({ + content: response.message.content, + role: "assistant", + }); return response; } diff --git a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts index 3c78d7d45..5230981ea 100644 --- a/packages/llamaindex/src/engines/chat/ContextChatEngine.ts +++ b/packages/llamaindex/src/engines/chat/ContextChatEngine.ts @@ -4,6 +4,7 @@ import type { MessageContent, MessageType, } from "@llamaindex/core/llms"; +import { EngineResponse } from "@llamaindex/core/schema"; import { extractText, streamConverter, @@ -12,7 +13,6 @@ import { } from "@llamaindex/core/utils"; import type { ChatHistory } from "../../ChatHistory.js"; import { getHistory } from "../../ChatHistory.js"; -import { EngineResponse } from "../../EngineResponse.js"; import type { ContextSystemPrompt } from "../../Prompt.js"; import type { BaseRetriever } from "../../Retriever.js"; import { Settings } from "../../Settings.js"; diff --git a/packages/llamaindex/src/engines/chat/SimpleChatEngine.ts b/packages/llamaindex/src/engines/chat/SimpleChatEngine.ts index 8e8cb38ba..bef71cc66 100644 --- a/packages/llamaindex/src/engines/chat/SimpleChatEngine.ts +++ b/packages/llamaindex/src/engines/chat/SimpleChatEngine.ts @@ -1,4 +1,5 @@ import type { LLM } from "@llamaindex/core/llms"; +import { EngineResponse } from "@llamaindex/core/schema"; import { streamConverter, streamReducer, @@ -6,7 +7,6 @@ import { } from "@llamaindex/core/utils"; import type { ChatHistory } from "../../ChatHistory.js"; import { getHistory } from "../../ChatHistory.js"; -import { EngineResponse } from "../../EngineResponse.js"; import { Settings } from "../../Settings.js"; import type { ChatEngine, diff --git a/packages/llamaindex/src/engines/chat/types.ts b/packages/llamaindex/src/engines/chat/types.ts index 26286084f..4f1f9802b 100644 --- a/packages/llamaindex/src/engines/chat/types.ts +++ b/packages/llamaindex/src/engines/chat/types.ts @@ -1,7 +1,6 @@ import type { ChatMessage, MessageContent } from "@llamaindex/core/llms"; -import type { NodeWithScore } from "@llamaindex/core/schema"; +import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema"; import type { ChatHistory } from "../../ChatHistory.js"; -import type { EngineResponse } from "../../EngineResponse.js"; /** * Represents the base parameters for ChatEngine. 
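
The hunks above introduce the core abstraction the rest of this patch builds on:
`QueryType = string | QueryBundle`, exported from `@llamaindex/core/query-engine`,
with `extractText` extended to normalize either form. A minimal sketch of the
intended call pattern (import paths follow the exports added above; the values
are illustrative only):

    import type { QueryBundle, QueryType } from "@llamaindex/core/query-engine";
    import { extractText } from "@llamaindex/core/utils";

    // Both forms are valid QueryType values after this patch.
    const plain: QueryType = "What did the author do in college?";
    const bundled: QueryBundle = {
      // image content would also go in `query`; there is no separate image_path
      query: [{ type: "text", text: "What did the author do in college?" }],
      embeddings: [0.1, 0.2, 0.3],
    };

    // extractText unwraps a QueryBundle before extracting its text,
    // so both calls below return the same string.
    console.log(extractText(plain));
    console.log(extractText(bundled));
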
diff --git a/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts b/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts index 1ab37aef0..659c077c9 100644 --- a/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts +++ b/packages/llamaindex/src/engines/query/RetrieverQueryEngine.ts @@ -1,6 +1,5 @@ -import type { NodeWithScore } from "@llamaindex/core/schema"; +import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema"; import { wrapEventCaller } from "@llamaindex/core/utils"; -import type { EngineResponse } from "../../EngineResponse.js"; import type { BaseNodePostprocessor } from "../../postprocessors/index.js"; import { PromptMixin } from "../../prompts/Mixin.js"; import type { BaseRetriever } from "../../Retriever.js"; @@ -78,11 +77,13 @@ export class RetrieverQueryEngine extends PromptMixin implements QueryEngine { const { query, stream } = params; const nodesWithScore = await this.retrieve(query); if (stream) { - return this.responseSynthesizer.synthesize({ - query, - nodesWithScore, - stream: true, - }); + return this.responseSynthesizer.synthesize( + { + query, + nodesWithScore, + }, + true, + ); } return this.responseSynthesizer.synthesize({ query, diff --git a/packages/llamaindex/src/engines/query/RouterQueryEngine.ts b/packages/llamaindex/src/engines/query/RouterQueryEngine.ts index 408d70c4d..6378734f6 100644 --- a/packages/llamaindex/src/engines/query/RouterQueryEngine.ts +++ b/packages/llamaindex/src/engines/query/RouterQueryEngine.ts @@ -1,15 +1,13 @@ -import type { NodeWithScore } from "@llamaindex/core/schema"; +import type { QueryType } from "@llamaindex/core/query-engine"; +import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema"; import { extractText } from "@llamaindex/core/utils"; -import { EngineResponse } from "../../EngineResponse.js"; import type { ServiceContext } from "../../ServiceContext.js"; import { llmFromSettingsOrContext } from "../../Settings.js"; -import { toQueryBundle } from "../../internal/utils.js"; import { PromptMixin } from "../../prompts/index.js"; import type { BaseSelector } from "../../selectors/index.js"; import { LLMSingleSelector } from "../../selectors/index.js"; import { TreeSummarize } from "../../synthesizers/index.js"; import type { - QueryBundle, QueryEngine, QueryEngineParamsNonStreaming, QueryEngineParamsStreaming, @@ -27,7 +25,7 @@ type RouterQueryEngineMetadata = { async function combineResponses( summarizer: TreeSummarize, responses: EngineResponse[], - queryBundle: QueryBundle, + queryType: QueryType, verbose: boolean = false, ): Promise<EngineResponse> { if (verbose) { @@ -42,11 +40,11 @@ async function combineResponses( sourceNodes.push(...response.sourceNodes); } - responseStrs.push(response.response); + responseStrs.push(extractText(response.message.content)); } const summary = await summarizer.getResponse({ - query: extractText(queryBundle.query), + query: extractText(queryType), textChunks: responseStrs, }); @@ -119,7 +117,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine { ): Promise<EngineResponse | AsyncIterable<EngineResponse>> { const { query, stream } = params; - const response = await this.queryRoute(toQueryBundle(query)); + const response = await this.queryRoute(query); if (stream) { throw new Error("Streaming is not supported yet."); @@ -128,8 +126,8 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine { return response; } - private async queryRoute(queryBundle: QueryBundle): Promise<EngineResponse> 
{ - const result = await this.selector.select(this.metadatas, queryBundle); + private async queryRoute(query: QueryType): Promise<EngineResponse> { + const result = await this.selector.select(this.metadatas, query); if (result.selections.length > 1) { const responses: EngineResponse[] = []; @@ -144,7 +142,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine { const selectedQueryEngine = this.queryEngines[engineInd.index]; responses.push( await selectedQueryEngine.query({ - query: extractText(queryBundle.query), + query: extractText(query), }), ); } @@ -153,7 +151,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine { const finalResponse = await combineResponses( this.summarizer, responses, - queryBundle, + query, this.verbose, ); @@ -181,7 +179,7 @@ export class RouterQueryEngine extends PromptMixin implements QueryEngine { } const finalResponse = await selectedQueryEngine.query({ - query: extractText(queryBundle.query), + query: extractText(query), }); // add selected result diff --git a/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts b/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts index dac3a079a..82685eaaa 100644 --- a/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts +++ b/packages/llamaindex/src/engines/query/SubQuestionQueryEngine.ts @@ -1,6 +1,8 @@ -import type { NodeWithScore } from "@llamaindex/core/schema"; -import { TextNode } from "@llamaindex/core/schema"; -import type { EngineResponse } from "../../EngineResponse.js"; +import { + EngineResponse, + TextNode, + type NodeWithScore, +} from "@llamaindex/core/schema"; import { LLMQuestionGenerator } from "../../QuestionGenerator.js"; import type { ServiceContext } from "../../ServiceContext.js"; import { PromptMixin } from "../../prompts/Mixin.js"; @@ -10,20 +12,18 @@ import { ResponseSynthesizer, } from "../../synthesizers/index.js"; -import type { - QueryEngine, - QueryEngineParamsNonStreaming, - QueryEngineParamsStreaming, -} from "../../types.js"; - import type { BaseTool, ToolMetadata } from "@llamaindex/core/llms"; +import type { BaseQueryEngine, QueryType } from "@llamaindex/core/query-engine"; import { wrapEventCaller } from "@llamaindex/core/utils"; import type { BaseQuestionGenerator, SubQuestion } from "./types.js"; /** * SubQuestionQueryEngine decomposes a question into subquestions and then */ -export class SubQuestionQueryEngine extends PromptMixin implements QueryEngine { +export class SubQuestionQueryEngine + extends PromptMixin + implements BaseQueryEngine +{ responseSynthesizer: BaseSynthesizer; questionGen: BaseQuestionGenerator; queryEngines: BaseTool[]; @@ -73,15 +73,13 @@ export class SubQuestionQueryEngine extends PromptMixin implements QueryEngine { }); } - query( - params: QueryEngineParamsStreaming, - ): Promise<AsyncIterable<EngineResponse>>; - query(params: QueryEngineParamsNonStreaming): Promise<EngineResponse>; + query(query: QueryType, stream: true): Promise<AsyncIterable<EngineResponse>>; + query(query: QueryType, stream?: false): Promise<EngineResponse>; @wrapEventCaller async query( - params: QueryEngineParamsStreaming | QueryEngineParamsNonStreaming, + query: QueryType, + stream?: boolean, ): Promise<EngineResponse | AsyncIterable<EngineResponse>> { - const { query, stream } = params; const subQuestions = await this.questionGen.generate(this.metadatas, query); const subQNodes = await Promise.all( @@ -92,16 +90,21 @@ export class SubQuestionQueryEngine extends PromptMixin implements QueryEngine { 
.filter((node) => node !== null) .map((node) => node as NodeWithScore); if (stream) { - return this.responseSynthesizer.synthesize({ + return this.responseSynthesizer.synthesize( + { + query, + nodesWithScore, + }, + true, + ); + } + return this.responseSynthesizer.synthesize( + { query, nodesWithScore, - stream: true, - }); - } - return this.responseSynthesizer.synthesize({ - query, - nodesWithScore, - }); + }, + false, + ); } private async querySubQ(subQ: SubQuestion): Promise<NodeWithScore | null> { diff --git a/packages/llamaindex/src/engines/query/types.ts b/packages/llamaindex/src/engines/query/types.ts index e0e27b90c..72fcbcd53 100644 --- a/packages/llamaindex/src/engines/query/types.ts +++ b/packages/llamaindex/src/engines/query/types.ts @@ -1,10 +1,11 @@ import type { ToolMetadata } from "@llamaindex/core/llms"; +import type { QueryType } from "@llamaindex/core/query-engine"; /** * QuestionGenerators generate new questions for the LLM using tools and a user query. */ export interface BaseQuestionGenerator { - generate(tools: ToolMetadata[], query: string): Promise<SubQuestion[]>; + generate(tools: ToolMetadata[], query: QueryType): Promise<SubQuestion[]>; } export interface SubQuestion { diff --git a/packages/llamaindex/src/evaluation/Correctness.ts b/packages/llamaindex/src/evaluation/Correctness.ts index ba814bf80..3dbf34a6e 100644 --- a/packages/llamaindex/src/evaluation/Correctness.ts +++ b/packages/llamaindex/src/evaluation/Correctness.ts @@ -74,7 +74,7 @@ export class CorrectnessEvaluator extends PromptMixin implements BaseEvaluator { { role: "user", content: defaultUserPrompt({ - query, + query: extractText(query), generatedAnswer: response, referenceAnswer: reference || "(NO REFERENCE ANSWER SUPPLIED)", }), @@ -106,7 +106,7 @@ export class CorrectnessEvaluator extends PromptMixin implements BaseEvaluator { query, response, }: EvaluatorResponseParams): Promise<EvaluationResult> { - const responseStr = response?.response; + const responseStr = extractText(response?.message.content); const contexts = []; if (response) { diff --git a/packages/llamaindex/src/evaluation/Faithfulness.ts b/packages/llamaindex/src/evaluation/Faithfulness.ts index bf7241486..b1a84a551 100644 --- a/packages/llamaindex/src/evaluation/Faithfulness.ts +++ b/packages/llamaindex/src/evaluation/Faithfulness.ts @@ -1,4 +1,5 @@ import { Document, MetadataMode } from "@llamaindex/core/schema"; +import { extractText } from "@llamaindex/core/utils"; import type { ServiceContext } from "../ServiceContext.js"; import { SummaryIndex } from "../indices/summary/index.js"; import { PromptMixin } from "../prompts/Mixin.js"; @@ -132,7 +133,7 @@ export class FaithfulnessEvaluator query, response, }: EvaluatorResponseParams): Promise<EvaluationResult> { - const responseStr = response?.response; + const responseStr = extractText(response?.message.content); const contexts = []; if (response) { diff --git a/packages/llamaindex/src/evaluation/Relevancy.ts b/packages/llamaindex/src/evaluation/Relevancy.ts index 58a1b15a3..f8b161306 100644 --- a/packages/llamaindex/src/evaluation/Relevancy.ts +++ b/packages/llamaindex/src/evaluation/Relevancy.ts @@ -1,4 +1,5 @@ import { Document, MetadataMode } from "@llamaindex/core/schema"; +import { extractText } from "@llamaindex/core/utils"; import type { ServiceContext } from "../ServiceContext.js"; import { SummaryIndex } from "../indices/summary/index.js"; import { PromptMixin } from "../prompts/Mixin.js"; @@ -121,7 +122,7 @@ export class RelevancyEvaluator extends PromptMixin 
implements BaseEvaluator { query, response, }: EvaluatorResponseParams): Promise<EvaluationResult> { - const responseStr = response?.response; + const responseStr = extractText(response?.message.content); const contexts = []; if (response) { diff --git a/packages/llamaindex/src/evaluation/types.ts b/packages/llamaindex/src/evaluation/types.ts index a38146b0f..66b7dc2f1 100644 --- a/packages/llamaindex/src/evaluation/types.ts +++ b/packages/llamaindex/src/evaluation/types.ts @@ -1,7 +1,8 @@ -import { EngineResponse } from "../EngineResponse.js"; +import type { QueryType } from "@llamaindex/core/query-engine"; +import type { EngineResponse } from "@llamaindex/core/schema"; export type EvaluationResult = { - query?: string; + query?: QueryType; contexts?: string[]; response: string | null; score: number; @@ -13,7 +14,7 @@ export type EvaluationResult = { }; export type EvaluatorParams = { - query: string | null; + query: QueryType; response: string; contexts?: string[]; reference?: string; @@ -21,7 +22,7 @@ export type EvaluatorParams = { }; export type EvaluatorResponseParams = { - query: string | null; + query: QueryType; response: EngineResponse; }; export interface BaseEvaluator { diff --git a/packages/llamaindex/src/index.edge.ts b/packages/llamaindex/src/index.edge.ts index 48cfcd00f..c76d3ea08 100644 --- a/packages/llamaindex/src/index.edge.ts +++ b/packages/llamaindex/src/index.edge.ts @@ -29,7 +29,6 @@ export * from "./ChatHistory.js"; export * from "./cloud/index.js"; export * from "./constants.js"; export * from "./embeddings/index.js"; -export * from "./EngineResponse.js"; export * from "./engines/chat/index.js"; export * from "./engines/query/index.js"; export * from "./evaluation/index.js"; diff --git a/packages/llamaindex/src/internal/utils.ts b/packages/llamaindex/src/internal/utils.ts index c3395bea4..a301c2707 100644 --- a/packages/llamaindex/src/internal/utils.ts +++ b/packages/llamaindex/src/internal/utils.ts @@ -3,7 +3,6 @@ import type { JSONValue } from "@llamaindex/core/global"; import type { ImageType } from "@llamaindex/core/schema"; import { fs } from "@llamaindex/env"; import { filetypemime } from "magic-bytes.js"; -import type { QueryBundle } from "../types.js"; export const isAsyncIterable = ( obj: unknown, @@ -203,10 +202,3 @@ export async function imageToDataUrl(input: ImageType): Promise<string> { } return await blobToDataUrl(input); } - -export function toQueryBundle(query: QueryBundle | string): QueryBundle { - if (typeof query === "string") { - return { query }; - } - return query; -} diff --git a/packages/llamaindex/src/selectors/base.ts b/packages/llamaindex/src/selectors/base.ts index 22a5c66da..cb0d18873 100644 --- a/packages/llamaindex/src/selectors/base.ts +++ b/packages/llamaindex/src/selectors/base.ts @@ -1,6 +1,6 @@ -import { toQueryBundle } from "../internal/utils.js"; +import type { QueryType } from "@llamaindex/core/query-engine"; import { PromptMixin } from "../prompts/Mixin.js"; -import type { QueryBundle, ToolMetadataOnlyDescription } from "../types.js"; +import type { ToolMetadataOnlyDescription } from "../types.js"; export interface SingleSelection { index: number; @@ -24,14 +24,13 @@ function wrapChoice( type MetadataType = string | ToolMetadataOnlyDescription; export abstract class BaseSelector extends PromptMixin { - async select(choices: MetadataType[], query: string | QueryBundle) { + async select(choices: MetadataType[], query: QueryType) { const metadata = choices.map((choice) => wrapChoice(choice)); - const queryBundle = 
toQueryBundle(query); - return await this._select(metadata, queryBundle); + return await this._select(metadata, query); } abstract _select( choices: ToolMetadataOnlyDescription[], - query: QueryBundle, + query: QueryType, ): Promise<SelectorResult>; } diff --git a/packages/llamaindex/src/selectors/llmSelectors.ts b/packages/llamaindex/src/selectors/llmSelectors.ts index e73966bfa..7ca44b869 100644 --- a/packages/llamaindex/src/selectors/llmSelectors.ts +++ b/packages/llamaindex/src/selectors/llmSelectors.ts @@ -1,10 +1,10 @@ import type { LLM } from "@llamaindex/core/llms"; +import type { QueryBundle } from "@llamaindex/core/query-engine"; import { extractText } from "@llamaindex/core/utils"; import type { Answer } from "../outputParsers/selectors.js"; import { SelectionOutputParser } from "../outputParsers/selectors.js"; import type { BaseOutputParser, - QueryBundle, StructuredOutput, ToolMetadataOnlyDescription, } from "../types.js"; @@ -40,19 +40,17 @@ function structuredOutputToSelectorResult( return { selections }; } -type LLMPredictorType = LLM; - /** * A selector that uses the LLM to select a single or multiple choices from a list of choices. */ export class LLMMultiSelector extends BaseSelector { - llm: LLMPredictorType; + llm: LLM; prompt: MultiSelectPrompt; maxOutputs: number; outputParser: BaseOutputParser<StructuredOutput<Answer[]>>; constructor(init: { - llm: LLMPredictorType; + llm: LLM; prompt?: MultiSelectPrompt; maxOutputs?: number; outputParser?: BaseOutputParser<StructuredOutput<Answer[]>>; @@ -117,12 +115,12 @@ export class LLMMultiSelector extends BaseSelector { * A selector that uses the LLM to select a single choice from a list of choices. */ export class LLMSingleSelector extends BaseSelector { - llm: LLMPredictorType; + llm: LLM; prompt: SingleSelectPrompt; outputParser: BaseOutputParser<StructuredOutput<Answer[]>>; constructor(init: { - llm: LLMPredictorType; + llm: LLM; prompt?: SingleSelectPrompt; outputParser?: BaseOutputParser<StructuredOutput<Answer[]>>; }) { diff --git a/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts b/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts index 2c998a410..d7cfd5a53 100644 --- a/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts +++ b/packages/llamaindex/src/synthesizers/MultiModalResponseSynthesizer.ts @@ -1,16 +1,11 @@ -import { MetadataMode } from "@llamaindex/core/schema"; +import { EngineResponse, MetadataMode } from "@llamaindex/core/schema"; import { streamConverter } from "@llamaindex/core/utils"; -import { EngineResponse } from "../EngineResponse.js"; import type { ServiceContext } from "../ServiceContext.js"; import { llmFromSettingsOrContext } from "../Settings.js"; import { PromptMixin } from "../prompts/Mixin.js"; import type { TextQaPrompt } from "./../Prompt.js"; import { defaultTextQaPrompt } from "./../Prompt.js"; -import type { - BaseSynthesizer, - SynthesizeParamsNonStreaming, - SynthesizeParamsStreaming, -} from "./types.js"; +import type { BaseSynthesizer, SynthesizeQuery } from "./types.js"; import { createMessageContent } from "./utils.js"; export class MultiModalResponseSynthesizer @@ -48,21 +43,22 @@ export class MultiModalResponseSynthesizer } synthesize( - params: SynthesizeParamsStreaming, + query: SynthesizeQuery, + stream: true, ): Promise<AsyncIterable<EngineResponse>>; - synthesize(params: SynthesizeParamsNonStreaming): Promise<EngineResponse>; - async synthesize({ - query, - nodesWithScore, - stream, - }: SynthesizeParamsStreaming | 
SynthesizeParamsNonStreaming): Promise<
-    AsyncIterable<EngineResponse> | EngineResponse
-  > {
+  synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>;
+  async synthesize(
+    query: SynthesizeQuery,
+    stream?: boolean,
+  ): Promise<AsyncIterable<EngineResponse> | EngineResponse> {
+    const { nodesWithScore } = query;
     const nodes = nodesWithScore.map(({ node }) => node);
     const prompt = await createMessageContent(
       this.textQATemplate,
       nodes,
-      { query },
+      // fixme: the template expects a string `query` variable, but `query`
+      // here is a SynthesizeQuery; extract its text before passing it.
+      {},
       this.metadataMode,
     );
diff --git a/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts b/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
index 9998efe9d..7ddbedb27 100644
--- a/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
+++ b/packages/llamaindex/src/synthesizers/ResponseSynthesizer.ts
@@ -1,6 +1,5 @@
-import { MetadataMode } from "@llamaindex/core/schema";
+import { EngineResponse, MetadataMode } from "@llamaindex/core/schema";
 import { streamConverter } from "@llamaindex/core/utils";
-import { EngineResponse } from "../EngineResponse.js";
 import type { ServiceContext } from "../ServiceContext.js";
 import { PromptMixin } from "../prompts/Mixin.js";
 import type { ResponseBuilderPrompts } from "./builders.js";
@@ -8,8 +7,7 @@ import { getResponseBuilder } from "./builders.js";
 import type {
   BaseSynthesizer,
   ResponseBuilder,
-  SynthesizeParamsNonStreaming,
-  SynthesizeParamsStreaming,
+  SynthesizeQuery,
 } from "./types.js";
 
 /**
@@ -56,33 +54,37 @@ export class ResponseSynthesizer
   }
 
   synthesize(
-    params: SynthesizeParamsStreaming,
+    query: SynthesizeQuery,
+    stream: true,
   ): Promise<AsyncIterable<EngineResponse>>;
-  synthesize(params: SynthesizeParamsNonStreaming): Promise<EngineResponse>;
-  async synthesize({
-    query,
-    nodesWithScore,
-    stream,
-  }: SynthesizeParamsStreaming | SynthesizeParamsNonStreaming): Promise<
-    AsyncIterable<EngineResponse> | EngineResponse
-  > {
+  synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>;
+  async synthesize(
+    query: SynthesizeQuery,
+    stream?: boolean,
+  ): Promise<AsyncIterable<EngineResponse> | EngineResponse> {
+    const { nodesWithScore } = query;
     const textChunks: string[] = nodesWithScore.map(({ node }) =>
       node.getContent(this.metadataMode),
     );
     if (stream) {
-      const response = await this.responseBuilder.getResponse({
-        query,
-        textChunks,
-        stream,
-      });
+      const response = await this.responseBuilder.getResponse(
+        {
+          ...query,
+          textChunks,
+        },
+        true,
+      );
       return streamConverter(response, (chunk) =>
        EngineResponse.fromResponse(chunk, true, nodesWithScore),
      );
    }
-    const response = await this.responseBuilder.getResponse({
-      query,
-      textChunks,
-    });
+    const response = await this.responseBuilder.getResponse(
+      {
+        ...query,
+        textChunks,
+      },
+      false,
+    );
     return EngineResponse.fromResponse(response, false, nodesWithScore);
   }
 }
diff --git a/packages/llamaindex/src/synthesizers/builders.ts b/packages/llamaindex/src/synthesizers/builders.ts
index f5ab12e81..46831b445 100644
--- a/packages/llamaindex/src/synthesizers/builders.ts
+++ b/packages/llamaindex/src/synthesizers/builders.ts
@@ -1,6 +1,6 @@
 import type { LLM } from "@llamaindex/core/llms";
+import type { QueryType } from "@llamaindex/core/query-engine";
 import { extractText, streamConverter } from "@llamaindex/core/utils";
-import { toQueryBundle } from "../internal/utils.js";
 import type {
   RefinePrompt,
   SimplePrompt,
@@ -20,11 +20,7 @@ import {
   llmFromSettingsOrContext,
   promptHelperFromSettingsOrContext,
 } from
"../Settings.js"; -import type { - ResponseBuilder, - ResponseBuilderParamsNonStreaming, - ResponseBuilderParamsStreaming, -} from "./types.js"; +import type { ResponseBuilder, ResponseBuilderQuery } from "./types.js"; /** * Response modes of the response synthesizer @@ -49,20 +45,16 @@ export class SimpleResponseBuilder implements ResponseBuilder { } getResponse( - params: ResponseBuilderParamsStreaming, + query: ResponseBuilderQuery, + stream: true, ): Promise<AsyncIterable<string>>; - getResponse(params: ResponseBuilderParamsNonStreaming): Promise<string>; - async getResponse({ - query, - textChunks, - stream, - }: - | ResponseBuilderParamsStreaming - | ResponseBuilderParamsNonStreaming): Promise< - AsyncIterable<string> | string - > { + getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>; + async getResponse( + { query, textChunks }: ResponseBuilderQuery, + stream?: boolean, + ): Promise<AsyncIterable<string> | string> { const input = { - query: extractText(toQueryBundle(query).query), + query: extractText(query), context: textChunks.join("\n\n"), }; @@ -123,19 +115,14 @@ export class Refine extends PromptMixin implements ResponseBuilder { } getResponse( - params: ResponseBuilderParamsStreaming, + query: ResponseBuilderQuery, + stream: true, ): Promise<AsyncIterable<string>>; - getResponse(params: ResponseBuilderParamsNonStreaming): Promise<string>; - async getResponse({ - query, - textChunks, - prevResponse, - stream, - }: - | ResponseBuilderParamsStreaming - | ResponseBuilderParamsNonStreaming): Promise< - AsyncIterable<string> | string - > { + getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>; + async getResponse( + { query, textChunks, prevResponse }: ResponseBuilderQuery, + stream?: boolean, + ): Promise<AsyncIterable<string> | string> { let response: AsyncIterable<string> | string | undefined = prevResponse; for (let i = 0; i < textChunks.length; i++) { @@ -143,14 +130,14 @@ export class Refine extends PromptMixin implements ResponseBuilder { const lastChunk = i === textChunks.length - 1; if (!response) { response = await this.giveResponseSingle( - extractText(toQueryBundle(query).query), + query, chunk, !!stream && lastChunk, ); } else { response = await this.refineResponseSingle( response as string, - extractText(toQueryBundle(query).query), + query, chunk, !!stream && lastChunk, ); @@ -161,12 +148,12 @@ export class Refine extends PromptMixin implements ResponseBuilder { } private async giveResponseSingle( - queryStr: string, + query: QueryType, textChunk: string, stream: boolean, ) { const textQATemplate: SimplePrompt = (input) => - this.textQATemplate({ ...input, query: queryStr }); + this.textQATemplate({ ...input, query: extractText(query) }); const textChunks = this.promptHelper.repack(textQATemplate, [textChunk]); let response: AsyncIterable<string> | string | undefined = undefined; @@ -184,7 +171,7 @@ export class Refine extends PromptMixin implements ResponseBuilder { } else { response = await this.refineResponseSingle( response as string, - queryStr, + query, chunk, stream && lastChunk, ); @@ -197,12 +184,12 @@ export class Refine extends PromptMixin implements ResponseBuilder { // eslint-disable-next-line max-params private async refineResponseSingle( initialReponse: string, - queryStr: string, + query: QueryType, textChunk: string, stream: boolean, ) { const refineTemplate: SimplePrompt = (input) => - this.refineTemplate({ ...input, query: queryStr }); + this.refineTemplate({ ...input, query: extractText(query) }); const 
textChunks = this.promptHelper.repack(refineTemplate, [textChunk]); @@ -241,28 +228,23 @@ export class Refine extends PromptMixin implements ResponseBuilder { */ export class CompactAndRefine extends Refine { getResponse( - params: ResponseBuilderParamsStreaming, + query: ResponseBuilderQuery, + stream: true, ): Promise<AsyncIterable<string>>; - getResponse(params: ResponseBuilderParamsNonStreaming): Promise<string>; - async getResponse({ - query, - textChunks, - prevResponse, - stream, - }: - | ResponseBuilderParamsStreaming - | ResponseBuilderParamsNonStreaming): Promise< - AsyncIterable<string> | string - > { + getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>; + async getResponse( + { query, textChunks, prevResponse }: ResponseBuilderQuery, + stream?: boolean, + ): Promise<AsyncIterable<string> | string> { const textQATemplate: SimplePrompt = (input) => this.textQATemplate({ ...input, - query: extractText(toQueryBundle(query).query), + query: extractText(query), }); const refineTemplate: SimplePrompt = (input) => this.refineTemplate({ ...input, - query: extractText(toQueryBundle(query).query), + query: extractText(query), }); const maxPrompt = getBiggestPrompt([textQATemplate, refineTemplate]); @@ -273,10 +255,12 @@ export class CompactAndRefine extends Refine { prevResponse, }; if (stream) { - return super.getResponse({ - ...params, - stream, - }); + return super.getResponse( + { + ...params, + }, + true, + ); } return super.getResponse(params); } @@ -316,18 +300,14 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder { } getResponse( - params: ResponseBuilderParamsStreaming, + query: ResponseBuilderQuery, + stream: true, ): Promise<AsyncIterable<string>>; - getResponse(params: ResponseBuilderParamsNonStreaming): Promise<string>; - async getResponse({ - query, - textChunks, - stream, - }: - | ResponseBuilderParamsStreaming - | ResponseBuilderParamsNonStreaming): Promise< - AsyncIterable<string> | string - > { + getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>; + async getResponse( + { query, textChunks }: ResponseBuilderQuery, + stream?: boolean, + ): Promise<AsyncIterable<string> | string> { if (!textChunks || textChunks.length === 0) { throw new Error("Must have at least one text chunk"); } @@ -342,7 +322,7 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder { const params = { prompt: this.summaryTemplate({ context: packedTextChunks[0], - query: extractText(toQueryBundle(query).query), + query: extractText(query), }), }; if (stream) { @@ -356,7 +336,7 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder { this.llm.complete({ prompt: this.summaryTemplate({ context: chunk, - query: extractText(toQueryBundle(query).query), + query: extractText(query), }), }), ), @@ -367,10 +347,12 @@ export class TreeSummarize extends PromptMixin implements ResponseBuilder { textChunks: summaries.map((s) => s.text), }; if (stream) { - return this.getResponse({ - ...params, - stream, - }); + return this.getResponse( + { + ...params, + }, + true, + ); } return this.getResponse(params); } diff --git a/packages/llamaindex/src/synthesizers/types.ts b/packages/llamaindex/src/synthesizers/types.ts index f1b7f1d84..4fb269165 100644 --- a/packages/llamaindex/src/synthesizers/types.ts +++ b/packages/llamaindex/src/synthesizers/types.ts @@ -1,56 +1,40 @@ -import type { NodeWithScore } from "@llamaindex/core/schema"; -import type { EngineResponse } from "../EngineResponse.js"; +import type { 
QueryType } from "@llamaindex/core/query-engine"; +import { EngineResponse, type NodeWithScore } from "@llamaindex/core/schema"; import type { PromptMixin } from "../prompts/Mixin.js"; -export interface SynthesizeParamsBase { - query: string; +export interface SynthesizeQuery { + query: QueryType; nodesWithScore: NodeWithScore[]; } -export interface SynthesizeParamsStreaming extends SynthesizeParamsBase { - stream: true; -} - -export interface SynthesizeParamsNonStreaming extends SynthesizeParamsBase { - stream?: false | null; -} - +// todo(himself65): Move this to @llamaindex/core/schema /** * A BaseSynthesizer is used to generate a response from a query and a list of nodes. */ export interface BaseSynthesizer { synthesize( - params: SynthesizeParamsStreaming, + query: SynthesizeQuery, + stream: true, ): Promise<AsyncIterable<EngineResponse>>; - synthesize(params: SynthesizeParamsNonStreaming): Promise<EngineResponse>; + synthesize(query: SynthesizeQuery, stream?: false): Promise<EngineResponse>; } -export interface ResponseBuilderParamsBase { - query: string; +export interface ResponseBuilderQuery { + query: QueryType; textChunks: string[]; prevResponse?: string; } -export interface ResponseBuilderParamsStreaming - extends ResponseBuilderParamsBase { - stream: true; -} - -export interface ResponseBuilderParamsNonStreaming - extends ResponseBuilderParamsBase { - stream?: false | null; -} - /** * A ResponseBuilder is used in a response synthesizer to generate a response from multiple response chunks. */ export interface ResponseBuilder extends Partial<PromptMixin> { /** * Get the response from a query and a list of text chunks. - * @param params */ getResponse( - params: ResponseBuilderParamsStreaming, + query: ResponseBuilderQuery, + stream: true, ): Promise<AsyncIterable<string>>; - getResponse(params: ResponseBuilderParamsNonStreaming): Promise<string>; + getResponse(query: ResponseBuilderQuery, stream?: false): Promise<string>; } diff --git a/packages/llamaindex/src/tools/QueryEngineTool.ts b/packages/llamaindex/src/tools/QueryEngineTool.ts index f1118dd67..30bac7a92 100644 --- a/packages/llamaindex/src/tools/QueryEngineTool.ts +++ b/packages/llamaindex/src/tools/QueryEngineTool.ts @@ -1,6 +1,6 @@ import type { BaseTool, ToolMetadata } from "@llamaindex/core/llms"; +import type { BaseQueryEngine } from "@llamaindex/core/query-engine"; import type { JSONSchemaType } from "ajv"; -import type { QueryEngine } from "../types.js"; const DEFAULT_NAME = "query_engine_tool"; const DEFAULT_DESCRIPTION = @@ -18,7 +18,7 @@ const DEFAULT_PARAMETERS: JSONSchemaType<QueryEngineParam> = { }; export type QueryEngineToolParams = { - queryEngine: QueryEngine; + queryEngine: BaseQueryEngine; metadata: ToolMetadata<JSONSchemaType<QueryEngineParam>>; }; @@ -27,7 +27,7 @@ export type QueryEngineParam = { }; export class QueryEngineTool implements BaseTool<QueryEngineParam> { - private queryEngine: QueryEngine; + private queryEngine: BaseQueryEngine; metadata: ToolMetadata<JSONSchemaType<QueryEngineParam>>; constructor({ queryEngine, metadata }: QueryEngineToolParams) { @@ -42,6 +42,6 @@ export class QueryEngineTool implements BaseTool<QueryEngineParam> { async call({ query }: QueryEngineParam) { const response = await this.queryEngine.query({ query }); - return response.response; + return response.message.content; } } diff --git a/packages/llamaindex/src/types.ts b/packages/llamaindex/src/types.ts index 66cf1c5f7..20964586f 100644 --- a/packages/llamaindex/src/types.ts +++ b/packages/llamaindex/src/types.ts 
@@ -1,8 +1,8 @@ /** * Top level types to avoid circular dependencies */ -import type { MessageContent, ToolMetadata } from "@llamaindex/core/llms"; -import type { EngineResponse } from "./EngineResponse.js"; +import type { ToolMetadata } from "@llamaindex/core/llms"; +import type { EngineResponse } from "@llamaindex/core/schema"; /** * Parameters for sending a query. @@ -52,15 +52,4 @@ export interface StructuredOutput<T> { export type ToolMetadataOnlyDescription = Pick<ToolMetadata, "description">; -/** - * @link https://docs.llamaindex.ai/en/stable/api_reference/schema/?h=querybundle#llama_index.core.schema.QueryBundle - * - * We don't have `image_path` here, because it is included in the `query` field. - */ -export type QueryBundle = { - query: string | MessageContent; - customEmbedding?: string[]; - embeddings?: number[]; -}; - export type UUID = `${string}-${string}-${string}-${string}-${string}`; -- GitLab
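
The calling convention this patch standardizes across query engines, synthesizers,
and response builders is: the query object comes first, followed by an optional
positional `stream` flag, with overloads narrowing the return type. A minimal
end-to-end sketch following the updated examples/lowlevel.ts (an LLM configured
via `Settings` and the node content are assumed):

    import { ResponseSynthesizer, TextNode } from "llamaindex";
    import { extractText } from "@llamaindex/core/utils";

    async function main() {
      const responseSynthesizer = new ResponseSynthesizer();
      const nodesWithScore = [
        { node: new TextNode({ text: "The user is 25 years old." }), score: 1 },
      ];

      // Non-streaming: resolves to a single EngineResponse.
      const response = await responseSynthesizer.synthesize({
        query: "What age am I?",
        nodesWithScore,
      });
      console.log(extractText(response.message.content));

      // Streaming: pass `true` positionally to get AsyncIterable<EngineResponse>.
      const stream = await responseSynthesizer.synthesize(
        { query: "What age am I?", nodesWithScore },
        true,
      );
      for await (const chunk of stream) {
        process.stdout.write(chunk.delta);
      }
    }

    main().catch(console.error);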