From 2008efe0eea9f46011af16ba7b3c776f2c84eb49 Mon Sep 17 00:00:00 2001
From: Alex Yang <himself65@outlook.com>
Date: Thu, 2 May 2024 19:54:05 -0500
Subject: [PATCH] feat: add verbose mode to Agent (#800)

---
 .changeset/rude-ducks-invent.md         |  5 ++++
 examples/.env.example                   |  1 +
 examples/agent/openai.ts                |  2 +-
 packages/core/src/Settings.ts           |  6 ++--
 packages/core/src/agent/anthropic.ts    |  7 ++++-
 packages/core/src/agent/base.ts         | 39 +++++++++++++++++++++----
 packages/core/src/agent/openai.ts       | 13 +++++++--
 packages/core/src/agent/react.ts        | 16 ++++++----
 packages/core/src/agent/types.ts        |  2 ++
 packages/core/src/agent/utils.ts        | 13 +++++++
 packages/core/src/engines/chat/types.ts |  5 ++++
 packages/core/src/internal/logger.ts    | 17 +++++++++++
 12 files changed, 108 insertions(+), 18 deletions(-)
 create mode 100644 .changeset/rude-ducks-invent.md
 create mode 100644 examples/.env.example
 create mode 100644 packages/core/src/internal/logger.ts

diff --git a/.changeset/rude-ducks-invent.md b/.changeset/rude-ducks-invent.md
new file mode 100644
index 000000000..5dc171299
--- /dev/null
+++ b/.changeset/rude-ducks-invent.md
@@ -0,0 +1,5 @@
+---
+"llamaindex": patch
+---
+
+feat: add verbose mode to Agent
diff --git a/examples/.env.example b/examples/.env.example
new file mode 100644
index 000000000..a86a908b7
--- /dev/null
+++ b/examples/.env.example
@@ -0,0 +1 @@
+DEBUG=llamaindex
diff --git a/examples/agent/openai.ts b/examples/agent/openai.ts
index 857008054..67d0d4b55 100644
--- a/examples/agent/openai.ts
+++ b/examples/agent/openai.ts
@@ -53,7 +53,7 @@ async function main() {
     message: "How much is 5 + 5? then divide by 2",
   });
 
-  console.log(String(response));
+  console.log(response.response.message);
 }
 
 void main().then(() => {
diff --git a/packages/core/src/Settings.ts b/packages/core/src/Settings.ts
index 7e9f8eb90..6852fffcc 100644
--- a/packages/core/src/Settings.ts
+++ b/packages/core/src/Settings.ts
@@ -55,9 +55,9 @@ class GlobalSettings implements Config {
   get debug() {
     const debug = getEnv("DEBUG");
     return (
-      getEnv("NODE_ENV") === "development" &&
-      Boolean(debug) &&
-      debug?.includes("llamaindex")
+      (Boolean(debug) && debug?.includes("llamaindex")) ||
+      debug === "*" ||
+      debug === "true"
     );
   }
 
diff --git a/packages/core/src/agent/anthropic.ts b/packages/core/src/agent/anthropic.ts
index accc87fca..3ab55521e 100644
--- a/packages/core/src/agent/anthropic.ts
+++ b/packages/core/src/agent/anthropic.ts
@@ -49,6 +49,7 @@ export class AnthropicAgent extends AgentRunner<Anthropic> {
         "tools" in params
           ? params.tools
           : params.toolRetriever.retrieve.bind(params.toolRetriever),
+      verbose: params.verbose ?? false,
     });
   }
 
@@ -94,7 +95,11 @@ export class AnthropicAgent extends AgentRunner<Anthropic> {
       const targetTool = tools.find(
         (tool) => tool.metadata.name === toolCall.name,
       );
-      const toolOutput = await callTool(targetTool, toolCall);
+      const toolOutput = await callTool(
+        targetTool,
+        toolCall,
+        step.context.logger,
+      );
       step.context.store.toolOutputs.push(toolOutput);
       step.context.store.messages = [
         ...step.context.store.messages,
diff --git a/packages/core/src/agent/base.ts b/packages/core/src/agent/base.ts
index e0e14f4f5..aa0a192cb 100644
--- a/packages/core/src/agent/base.ts
+++ b/packages/core/src/agent/base.ts
@@ -4,12 +4,14 @@ import {
   pipeline,
   randomUUID,
 } from "@llamaindex/env";
+import { Settings } from "../Settings.js";
 import {
   type ChatEngine,
   type ChatEngineParamsNonStreaming,
   type ChatEngineParamsStreaming,
 } from "../engines/chat/index.js";
 import { wrapEventCaller } from "../internal/context/EventCaller.js";
+import { consoleLogger, emptyLogger } from "../internal/logger.js";
 import { getCallbackManager } from "../internal/settings/CallbackManager.js";
 import { isAsyncIterable } from "../internal/utils.js";
 import type {
@@ -66,6 +68,7 @@ export function createTaskOutputStream<
       const enqueueOutput = (
         output: TaskStepOutput<Model, Store, AdditionalMessageOptions>,
       ) => {
+        context.logger.log("Enqueueing output for step(id, %s).", step.id);
         taskOutputs.push(output);
         controller.enqueue(output);
       };
@@ -75,7 +78,9 @@
         },
       });
 
+      context.logger.log("Starting step(id, %s).", step.id);
       await handler(step, enqueueOutput);
+      context.logger.log("Finished step(id, %s).", step.id);
       // fixme: support multi-thread when there are multiple outputs
       // todo: for now we pretend there is only one task output
       const { isLast, taskStep } = taskOutputs[0];
@@ -87,6 +92,10 @@
         toolCallCount: 1,
       };
       if (isLast) {
+        context.logger.log(
+          "Final step(id, %s) reached, closing task.",
+          step.id,
+        );
         getCallbackManager().dispatchEvent("agent-end", {
           payload: {
             endStep: step,
@@ -125,6 +134,7 @@
   tools:
     | BaseToolWithCall[]
     | ((query: MessageContent) => Promise<BaseToolWithCall[]>);
+  verbose: boolean;
 };
 
 export type AgentParamsBase<
@@ -139,6 +149,7 @@
   llm?: AI;
   chatHistory?: ChatMessage<AdditionalMessageOptions>[];
   systemPrompt?: MessageContent;
+  verbose?: boolean;
 };
 
 /**
@@ -218,6 +229,7 @@ export abstract class AgentRunner<
   readonly #systemPrompt: MessageContent | null = null;
   #chatHistory: ChatMessage<AdditionalMessageOptions>[];
   readonly #runner: AgentWorker<AI, Store, AdditionalMessageOptions>;
+  readonly #verbose: boolean;
 
   // create extra store
   abstract createStore(): Store;
@@ -229,14 +241,15 @@ export abstract class AgentRunner<
   protected constructor(
     params: AgentRunnerParams<AI, Store, AdditionalMessageOptions>,
   ) {
-    const { llm, chatHistory, runner, tools } = params;
+    const { llm, chatHistory, systemPrompt, runner, tools, verbose } = params;
     this.#llm = llm;
     this.#chatHistory = chatHistory;
     this.#runner = runner;
-    if (params.systemPrompt) {
-      this.#systemPrompt = params.systemPrompt;
+    if (systemPrompt) {
+      this.#systemPrompt = systemPrompt;
     }
     this.#tools = tools;
+    this.#verbose = verbose;
   }
 
   get llm() {
@@ -247,6 +260,10 @@
     return this.#chatHistory;
   }
 
+  get verbose(): boolean {
+    return Settings.debug || this.#verbose;
+  }
+
   public reset(): void {
     this.#chatHistory = [];
   }
@@ -270,8 +287,11 @@ export abstract class AgentRunner<
     return task.context.toolCallCount < MAX_TOOL_CALLS;
   }
 
-  // fixme: this shouldn't be async
-  async createTask(message: MessageContent, stream: boolean = false) {
+  createTask(
+    message: MessageContent,
+    stream: boolean = false,
+    verbose: boolean | undefined = undefined,
+  ) {
     const initialMessages = [...this.#chatHistory];
     if (this.#systemPrompt !== null) {
       const systemPrompt = this.#systemPrompt;
@@ -296,6 +316,13 @@
         toolOutputs: [] as ToolOutput[],
       },
       shouldContinue: AgentRunner.shouldContinue,
+      logger:
+        // disable verbose if explicitly set to false
+        verbose === false
+          ? emptyLogger
+          : verbose || this.verbose
+            ? consoleLogger
+            : emptyLogger,
     });
   }
 
@@ -312,7 +339,7 @@
     | AgentChatResponse<AdditionalMessageOptions>
     | ReadableStream<AgentStreamChatResponse<AdditionalMessageOptions>>
   > {
-    const task = await this.createTask(params.message, !!params.stream);
+    const task = this.createTask(params.message, !!params.stream);
     const stepOutput = await pipeline(
       task,
       async (
diff --git a/packages/core/src/agent/openai.ts b/packages/core/src/agent/openai.ts
index f7146665d..fec65b7dd 100644
--- a/packages/core/src/agent/openai.ts
+++ b/packages/core/src/agent/openai.ts
@@ -46,6 +46,7 @@ export class OpenAIAgent extends AgentRunner<OpenAI> {
         "tools" in params
           ? params.tools
           : params.toolRetriever.retrieve.bind(params.toolRetriever),
+      verbose: params.verbose ?? false,
     });
   }
 
@@ -77,7 +78,11 @@ export class OpenAIAgent extends AgentRunner<OpenAI> {
       const targetTool = tools.find(
         (tool) => tool.metadata.name === toolCall.name,
       );
-      const toolOutput = await callTool(targetTool, toolCall);
+      const toolOutput = await callTool(
+        targetTool,
+        toolCall,
+        step.context.logger,
+      );
       step.context.store.toolOutputs.push(toolOutput);
       step.context.store.messages = [
         ...step.context.store.messages,
@@ -154,7 +159,11 @@
           },
         },
       ];
-      const toolOutput = await callTool(targetTool, toolCall);
+      const toolOutput = await callTool(
+        targetTool,
+        toolCall,
+        step.context.logger,
+      );
       step.context.store.messages = [
         ...step.context.store.messages,
         {
diff --git a/packages/core/src/agent/react.ts b/packages/core/src/agent/react.ts
index ebed34fb1..f1e895d94 100644
--- a/packages/core/src/agent/react.ts
+++ b/packages/core/src/agent/react.ts
@@ -354,6 +354,7 @@ export class ReActAgent extends AgentRunner<LLM, ReACTAgentStore> {
         "tools" in params
           ? params.tools
           : params.toolRetriever.retrieve.bind(params.toolRetriever),
+      verbose: params.verbose ?? false,
     });
   }
 
@@ -387,14 +388,19 @@ export class ReActAgent extends AgentRunner<LLM, ReACTAgentStore> {
           isLast: type !== "action",
         });
       });
+      step.context.logger.log("current reason: %O", reason);
       step.context.store.reasons = [...step.context.store.reasons, reason];
       if (reason.type === "action") {
         const tool = tools.find((tool) => tool.metadata.name === reason.action);
-        const toolOutput = await callTool(tool, {
-          id: randomUUID(),
-          input: reason.input,
-          name: reason.action,
-        });
+        const toolOutput = await callTool(
+          tool,
+          {
+            id: randomUUID(),
+            input: reason.input,
+            name: reason.action,
+          },
+          step.context.logger,
+        );
         step.context.store.reasons = [
           ...step.context.store.reasons,
           {
diff --git a/packages/core/src/agent/types.ts b/packages/core/src/agent/types.ts
index b3d48374c..22d562c1a 100644
--- a/packages/core/src/agent/types.ts
+++ b/packages/core/src/agent/types.ts
@@ -1,4 +1,5 @@
 import { ReadableStream } from "@llamaindex/env";
+import type { Logger } from "../internal/logger.js";
 import type { BaseEvent } from "../internal/type.js";
 import type {
   ChatMessage,
@@ -32,6 +33,7 @@
     toolOutputs: ToolOutput[];
     messages: ChatMessage<AdditionalMessageOptions>[];
   } & Store;
+  logger: Readonly<Logger>;
 };
 
 export type TaskStep<
diff --git a/packages/core/src/agent/utils.ts b/packages/core/src/agent/utils.ts
index df8f6b8cd..d385016d8 100644
--- a/packages/core/src/agent/utils.ts
+++ b/packages/core/src/agent/utils.ts
@@ -1,4 +1,5 @@
 import { ReadableStream } from "@llamaindex/env";
+import type { Logger } from "../internal/logger.js";
 import { getCallbackManager } from "../internal/settings/CallbackManager.js";
 import { isAsyncIterable, prettifyError } from "../internal/utils.js";
 import type {
@@ -13,12 +14,14 @@ import type { BaseTool, JSONObject, JSONValue, ToolOutput } from "../types.js";
 export async function callTool(
   tool: BaseTool | undefined,
   toolCall: ToolCall | PartialToolCall,
+  logger: Logger,
 ): Promise<ToolOutput> {
   const input: JSONObject =
     typeof toolCall.input === "string"
       ? JSON.parse(toolCall.input)
       : toolCall.input;
   if (!tool) {
+    logger.error(`Tool ${toolCall.name} does not exist.`);
     const output = `Tool ${toolCall.name} does not exist.`;
     return {
       tool,
@@ -30,6 +33,9 @@
   const call = tool.call;
   let output: JSONValue;
   if (!call) {
+    logger.error(
+      `Tool ${tool.metadata.name} (remote:${toolCall.name}) does not have a implementation.`,
+    );
     output = `Tool ${tool.metadata.name} (remote:${toolCall.name}) does not have a implementation.`;
     return {
       tool,
@@ -45,6 +51,10 @@
       },
     });
     output = await call.call(tool, input);
+    logger.log(
+      `Tool ${tool.metadata.name} (remote:${toolCall.name}) succeeded.`,
+    );
+    logger.log(`Output: ${JSON.stringify(output)}`);
     const toolOutput: ToolOutput = {
       tool,
       input,
@@ -60,6 +70,9 @@
     return toolOutput;
   } catch (e) {
     output = prettifyError(e);
+    logger.error(
+      `Tool ${tool.metadata.name} (remote:${toolCall.name}) failed: ${output}`,
+    );
   }
   return {
     tool,
diff --git a/packages/core/src/engines/chat/types.ts b/packages/core/src/engines/chat/types.ts
index d81e0035a..0b00f1d1d 100644
--- a/packages/core/src/engines/chat/types.ts
+++ b/packages/core/src/engines/chat/types.ts
@@ -13,6 +13,11 @@ export interface ChatEngineParamsBase {
    * Optional chat history if you want to customize the chat history.
    */
   chatHistory?: ChatMessage[] | ChatHistory;
+  /**
+   * Optional flag to enable verbose mode.
+   * @default false
+   */
+  verbose?: boolean;
 }
 
 export interface ChatEngineParamsStreaming extends ChatEngineParamsBase {
diff --git a/packages/core/src/internal/logger.ts b/packages/core/src/internal/logger.ts
new file mode 100644
index 000000000..686d7e170
--- /dev/null
+++ b/packages/core/src/internal/logger.ts
@@ -0,0 +1,17 @@
+export type Logger = {
+  log: (...args: unknown[]) => void;
+  error: (...args: unknown[]) => void;
+  warn: (...args: unknown[]) => void;
+};
+
+export const emptyLogger: Logger = Object.freeze({
+  log: () => {},
+  error: () => {},
+  warn: () => {},
+});
+
+export const consoleLogger: Logger = Object.freeze({
+  log: console.log.bind(console),
+  error: console.error.bind(console),
+  warn: console.warn.bind(console),
+});
-- 
GitLab
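For reference, a minimal sketch of how the new flag could be used once this patch is applied. It assumes the existing `OpenAIAgent` and `FunctionTool` exports from the `llamaindex` package; the `sumNumbers` tool is an illustrative stand-in and not part of the diff. Only the `verbose` option and the `DEBUG=llamaindex` environment variable come from this change.

// usage-sketch.ts — illustrative only; the tool definition below is an assumption
import { FunctionTool, OpenAIAgent } from "llamaindex";

const sumNumbers = FunctionTool.from(
  ({ a, b }: { a: number; b: number }) => `${a + b}`,
  {
    name: "sumNumbers",
    description: "Use this function to sum two numbers",
    parameters: {
      type: "object",
      properties: {
        a: { type: "number", description: "The first number" },
        b: { type: "number", description: "The second number" },
      },
      required: ["a", "b"],
    },
  },
);

async function main() {
  const agent = new OpenAIAgent({
    tools: [sumNumbers],
    // New in this patch: opt into step/tool logging for this agent instance.
    verbose: true,
  });

  // Alternatively, leave `verbose` unset and export DEBUG=llamaindex
  // (see examples/.env.example) so Settings.debug enables logging globally.
  const response = await agent.chat({
    message: "How much is 5 + 5? then divide by 2",
  });
  console.log(response.response.message);
}

void main();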