From c1b5be518232328a937528e01ce925654269ab98 Mon Sep 17 00:00:00 2001
From: Thuc Pham <51660321+thucpn@users.noreply.github.com>
Date: Mon, 10 Mar 2025 11:48:51 +0700
Subject: [PATCH] feat: make AgentWorkflow llm param optional (#1727)

---
 .changeset/shiny-camels-deny.md                    |  6 ++++++
 .../content/docs/llamaindex/modules/workflows.mdx  |  2 +-
 examples/agentworkflow/multiple_agents.ts          |  2 +-
 examples/agentworkflow/single_agent.ts             |  5 +++--
 examples/node/workflow/basic.ts                    |  2 +-
 examples/workflow/app-creator.ts                   |  2 +-
 examples/workflow/conditional.ts                   |  2 +-
 examples/workflow/joke.ts                          |  7 +------
 examples/workflow/stream-events.ts                 |  2 +-
 examples/workflow/timeout.ts                       |  2 +-
 examples/workflow/validation.ts                    |  7 +------
 packages/llamaindex/src/index.edge.ts              |  1 +
 packages/workflow/src/agent/agent-workflow.ts      |  2 +-
 packages/workflow/src/agent/function-agent.ts      |  5 +++--
 unit/workflow/workflow-ui.test.tsx                 |  7 +------
 unit/workflow/workflow.test.ts                     | 13 ++-----------
 16 files changed, 26 insertions(+), 41 deletions(-)
 create mode 100644 .changeset/shiny-camels-deny.md

diff --git a/.changeset/shiny-camels-deny.md b/.changeset/shiny-camels-deny.md
new file mode 100644
index 000000000..ca8da4eff
--- /dev/null
+++ b/.changeset/shiny-camels-deny.md
@@ -0,0 +1,6 @@
+---
+"llamaindex": patch
+"@llamaindex/workflow": patch
+---
+
+feat: make AgentWorkflow llm param optional
diff --git a/apps/next/src/content/docs/llamaindex/modules/workflows.mdx b/apps/next/src/content/docs/llamaindex/modules/workflows.mdx
index f717bd5f5..eb53567a4 100644
--- a/apps/next/src/content/docs/llamaindex/modules/workflows.mdx
+++ b/apps/next/src/content/docs/llamaindex/modules/workflows.mdx
@@ -119,7 +119,7 @@ Lastly, we run the workflow. The `.run()` method is async, so we use await here
 Optionally, you can choose to use a shared context between steps by specifying a context type when creating the workflow. Here's an example where multiple steps access a shared state:
 
 ```typescript
-import { HandlerContext } from "@llamaindex/workflow";
+import { HandlerContext } from "llamaindex";
 
 type MyContextData = {
   query: string;
diff --git a/examples/agentworkflow/multiple_agents.ts b/examples/agentworkflow/multiple_agents.ts
index 4ff86699d..1340ebc98 100644
--- a/examples/agentworkflow/multiple_agents.ts
+++ b/examples/agentworkflow/multiple_agents.ts
@@ -4,7 +4,6 @@
  * 2. TemperatureConverterAgent - Converts the temperature from Fahrenheit to Celsius
  */
 import { OpenAI } from "@llamaindex/openai";
-import { StopEvent } from "@llamaindex/workflow";
 import {
   AgentInput,
   AgentOutput,
@@ -14,6 +13,7 @@ import {
   AgentWorkflow,
   FunctionAgent,
   FunctionTool,
+  StopEvent,
 } from "llamaindex";
 import { z } from "zod";
 
diff --git a/examples/agentworkflow/single_agent.ts b/examples/agentworkflow/single_agent.ts
index ab1300151..9a849ab52 100644
--- a/examples/agentworkflow/single_agent.ts
+++ b/examples/agentworkflow/single_agent.ts
@@ -2,17 +2,18 @@
  * This example shows how to use AgentWorkflow as a single agent with tools
  */
 import { OpenAI } from "@llamaindex/openai";
-import { AgentWorkflow } from "llamaindex";
+import { AgentWorkflow, Settings } from "llamaindex";
 import { getWeatherTool } from "../agent/utils/tools";
 
 const llm = new OpenAI({
   model: "gpt-4o",
 });
 
+Settings.llm = llm;
+
 async function singleWeatherAgent() {
   const workflow = AgentWorkflow.fromTools({
     tools: [getWeatherTool],
-    llm,
     verbose: false,
   });
 
diff --git a/examples/node/workflow/basic.ts b/examples/node/workflow/basic.ts
index 39c8fa9d1..65d855ddc 100644
--- a/examples/node/workflow/basic.ts
+++ b/examples/node/workflow/basic.ts
@@ -1,4 +1,4 @@
-import { StartEvent, StopEvent, Workflow } from "@llamaindex/workflow";
+import { StartEvent, StopEvent, Workflow } from "llamaindex";
 
 type ContextData = {
   counter: number;
diff --git a/examples/workflow/app-creator.ts b/examples/workflow/app-creator.ts
index c1b05659e..08f9508c6 100644
--- a/examples/workflow/app-creator.ts
+++ b/examples/workflow/app-creator.ts
@@ -5,7 +5,7 @@ import {
   StopEvent,
   Workflow,
   WorkflowEvent,
-} from "@llamaindex/workflow";
+} from "llamaindex";
 
 const MAX_REVIEWS = 3;
 
diff --git a/examples/workflow/conditional.ts b/examples/workflow/conditional.ts
index 6a2e38406..25fafe501 100644
--- a/examples/workflow/conditional.ts
+++ b/examples/workflow/conditional.ts
@@ -5,7 +5,7 @@ import {
   StopEvent,
   Workflow,
   WorkflowEvent,
-} from "@llamaindex/workflow";
+} from "llamaindex";
 
 // Create LLM instance
 const llm = new OpenAI();
diff --git a/examples/workflow/joke.ts b/examples/workflow/joke.ts
index b13e4dd18..76f5ccbfb 100644
--- a/examples/workflow/joke.ts
+++ b/examples/workflow/joke.ts
@@ -1,10 +1,5 @@
 import { OpenAI } from "@llamaindex/openai";
-import {
-  StartEvent,
-  StopEvent,
-  Workflow,
-  WorkflowEvent,
-} from "@llamaindex/workflow";
+import { StartEvent, StopEvent, Workflow, WorkflowEvent } from "llamaindex";
 
 // Create LLM instance
 const llm = new OpenAI();
diff --git a/examples/workflow/stream-events.ts b/examples/workflow/stream-events.ts
index 8d337b354..576229c92 100644
--- a/examples/workflow/stream-events.ts
+++ b/examples/workflow/stream-events.ts
@@ -5,7 +5,7 @@ import {
   StopEvent,
   Workflow,
   WorkflowEvent,
-} from "@llamaindex/workflow";
+} from "llamaindex";
 
 // Create LLM instance
 const llm = new OpenAI();
diff --git a/examples/workflow/timeout.ts b/examples/workflow/timeout.ts
index 261fb3980..fee67464d 100644
--- a/examples/workflow/timeout.ts
+++ b/examples/workflow/timeout.ts
@@ -1,4 +1,4 @@
-import { StartEvent, StopEvent, Workflow } from "@llamaindex/workflow";
+import { StartEvent, StopEvent, Workflow } from "llamaindex";
 
 const longRunning = async (_: unknown, ev: StartEvent<string>) => {
   await new Promise((resolve) => setTimeout(resolve, 2000)); // Wait for 2 seconds
diff --git a/examples/workflow/validation.ts b/examples/workflow/validation.ts
index 7bfacf844..2b34fd638 100644
--- a/examples/workflow/validation.ts
+++ b/examples/workflow/validation.ts
@@ -1,10 +1,5 @@
 import { OpenAI } from "@llamaindex/openai";
-import {
-  StartEvent,
-  StopEvent,
-  Workflow,
-  WorkflowEvent,
-} from "@llamaindex/workflow";
+import { StartEvent, StopEvent, Workflow, WorkflowEvent } from "llamaindex";
 
 // Create LLM instance
 const llm = new OpenAI();
diff --git a/packages/llamaindex/src/index.edge.ts b/packages/llamaindex/src/index.edge.ts
index 3a79a2080..5750a8f7c 100644
--- a/packages/llamaindex/src/index.edge.ts
+++ b/packages/llamaindex/src/index.edge.ts
@@ -67,6 +67,7 @@ export * from "@llamaindex/core/storage/index-store";
 export * from "@llamaindex/core/storage/kv-store";
 export * from "@llamaindex/core/utils";
 export * from "@llamaindex/openai";
+export * from "@llamaindex/workflow";
 export * from "@llamaindex/workflow/agent";
 export * from "./agent/index.js";
 export * from "./cloud/index.js";
diff --git a/packages/workflow/src/agent/agent-workflow.ts b/packages/workflow/src/agent/agent-workflow.ts
index aa83ad3be..5471ae746 100644
--- a/packages/workflow/src/agent/agent-workflow.ts
+++ b/packages/workflow/src/agent/agent-workflow.ts
@@ -159,7 +159,7 @@ export class AgentWorkflow {
     timeout,
   }: {
     tools: BaseToolWithCall[];
-    llm: ToolCallLLM;
+    llm?: ToolCallLLM;
     systemPrompt?: string;
     verbose?: boolean;
     timeout?: number;
diff --git a/packages/workflow/src/agent/function-agent.ts b/packages/workflow/src/agent/function-agent.ts
index d59155e8e..29b6e9d85 100644
--- a/packages/workflow/src/agent/function-agent.ts
+++ b/packages/workflow/src/agent/function-agent.ts
@@ -1,4 +1,5 @@
 import type { JSONObject } from "@llamaindex/core/global";
+import { Settings } from "@llamaindex/core/global";
 import type {
   BaseToolWithCall,
   ChatMessage,
@@ -23,7 +24,7 @@ export type FunctionAgentParams = {
   /**
    * LLM to use for the agent, required.
    */
-  llm: ToolCallLLM;
+  llm?: ToolCallLLM | undefined;
   /**
    * Description of the agent, useful for task assignment.
    * Should provide the capabilities or responsibilities of the agent.
@@ -60,7 +61,7 @@ export class FunctionAgent implements BaseWorkflowAgent {
     systemPrompt,
   }: FunctionAgentParams) {
     this.name = name;
-    this.llm = llm;
+    this.llm = llm ?? (Settings.llm as ToolCallLLM);
     this.description = description;
     this.tools = tools;
     if (tools.length === 0) {
diff --git a/unit/workflow/workflow-ui.test.tsx b/unit/workflow/workflow-ui.test.tsx
index ffd4b30d2..5c0b8f6d7 100644
--- a/unit/workflow/workflow-ui.test.tsx
+++ b/unit/workflow/workflow-ui.test.tsx
@@ -1,9 +1,4 @@
-import {
-  StartEvent,
-  StopEvent,
-  Workflow,
-  WorkflowEvent,
-} from "@llamaindex/workflow";
+import { StartEvent, StopEvent, Workflow, WorkflowEvent } from "llamaindex";
 import type { ReactNode } from "react";
 import { describe, expect, test } from "vitest";
 
diff --git a/unit/workflow/workflow.test.ts b/unit/workflow/workflow.test.ts
index 522bc3a37..fd7ed5b73 100644
--- a/unit/workflow/workflow.test.ts
+++ b/unit/workflow/workflow.test.ts
@@ -1,14 +1,5 @@
-import type {
-  HandlerContext,
-  StepHandler,
-  StepParameters,
-} from "@llamaindex/workflow";
-import {
-  StartEvent,
-  StopEvent,
-  Workflow,
-  WorkflowEvent,
-} from "@llamaindex/workflow";
+import type { HandlerContext, StepHandler, StepParameters } from "llamaindex";
+import { StartEvent, StopEvent, Workflow, WorkflowEvent } from "llamaindex";
 import {
   beforeEach,
   describe,
--
GitLab
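
Reviewer note (not part of the patch): with `llm` now optional on `AgentWorkflow.fromTools` and in `FunctionAgentParams`, an agent built without an explicit model falls back to `Settings.llm`, as the updated `single_agent.ts` shows. Below is a minimal self-contained sketch of the new usage; the `add` tool, the model name, and the prompt are illustrative placeholders rather than code from this patch.

```typescript
import { OpenAI } from "@llamaindex/openai";
import { AgentWorkflow, FunctionTool, Settings } from "llamaindex";
import { z } from "zod";

// Register a default model once; agents created without an explicit `llm`
// now resolve it from Settings.llm (see the FunctionAgent constructor change).
Settings.llm = new OpenAI({ model: "gpt-4o" });

// Illustrative inline tool so the sketch stays self-contained.
const addTool = FunctionTool.from(
  ({ a, b }: { a: number; b: number }) => `${a + b}`,
  {
    name: "add",
    description: "Add two numbers and return the sum",
    parameters: z.object({ a: z.number(), b: z.number() }),
  },
);

async function main() {
  // No `llm` option here -- it falls back to Settings.llm.
  const workflow = AgentWorkflow.fromTools({
    tools: [addTool],
    verbose: false,
  });

  const result = await workflow.run("What is 2 + 3?");
  console.log(JSON.stringify(result, null, 2));
}

main().catch(console.error);
```

Since the fallback casts with `Settings.llm as ToolCallLLM`, the globally configured model is assumed to support tool calling.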
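
The new `export * from "@llamaindex/workflow"` line in `index.edge.ts` is what lets the examples and tests above import workflow primitives directly from `llamaindex`. A tiny sketch of that shorter import path, assuming the `Workflow`/`addStep` API used by the repo's existing examples; the echo step and names are made up for illustration.

```typescript
import { StartEvent, StopEvent, Workflow } from "llamaindex";

// Single-step workflow: consume the StartEvent and emit a StopEvent.
const echoFlow = new Workflow<unknown, string, string>();

echoFlow.addStep(
  {
    inputs: [StartEvent<string>],
    outputs: [StopEvent<string>],
  },
  async (_: unknown, ev: StartEvent<string>) => {
    // Echo the start payload back as the final result.
    return new StopEvent(`echo: ${ev.data}`);
  },
);

async function main() {
  const result = await echoFlow.run("hello");
  console.log(result.data); // "echo: hello"
}

main().catch(console.error);
```

Existing code that imports these classes from `@llamaindex/workflow` keeps working; the re-export only adds the shorter path.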