Unverified commit bd239aaf, authored by Marcus Schiesser and committed by GitHub

docs: update main agent docs (#1735)

Parent commit: 98eebf72
@@ -60,7 +60,7 @@ export default function HomePage() {
         icon={Footprints}
         subheading="Progressive"
         heading="From the simplest to the most complex"
-        description="LlamaIndex.TS is designed to be simple to get started, but powerful enough to build complex, agentic AI applications."
+        description="LlamaIndex.TS is designed to be simple to get started, but powerful enough to build complex, agentic AI applications using multi-agents."
       >
         <Suspense
           fallback={
@@ -76,44 +76,48 @@ export default function HomePage() {
       >
         <MagicMove
           code={[
-            `import { OpenAI } from "@llamaindex/openai";
-const llm = new OpenAI();
+            `import { openai } from "@llamaindex/openai";
+const llm = openai();
 const response = await llm.complete({ prompt: "How are you?" });`,
-            `import { OpenAI } from "@llamaindex/openai";
-const llm = new OpenAI();
+            `import { openai } from "@llamaindex/openai";
+const llm = openai();
 const response = await llm.chat({
   messages: [{ content: "Tell me a joke.", role: "user" }],
 });`,
-            `import { ChatMemoryBuffer } from "llamaindex";
-import { OpenAI } from "@llamaindex/openai";
-const llm = new OpenAI({ model: 'gpt4o-turbo' });
-const buffer = new ChatMemoryBuffer({
-  tokenLimit: 128_000,
-})
-buffer.put({ content: "Tell me a joke.", role: "user" })
-const response = await llm.chat({
-  messages: buffer.getMessages(),
-  stream: true
-});`,
-            `import { ChatMemoryBuffer } from "llamaindex";
-import { OpenAIAgent } from "@llamaindex/openai";
-const agent = new OpenAIAgent({
-  llm,
-  tools: [...myTools]
-  systemPrompt,
-});
-const buffer = new ChatMemoryBuffer({
-  tokenLimit: 128_000,
-})
-buffer.put({ content: "Analysis the data based on the given data.", role: "user" })
-buffer.put({ content: \`\${data}\`, role: "user" })
-const response = await agent.chat({
-  message: buffer.getMessages(),
-});`,
+            `import { agent } from "llamaindex";
+import { openai } from "@llamaindex/openai";
+const analyseAgent = agent({
+  llm: openai({ model: "gpt-4o" }),
+  tools: [analyseTools],
+  systemPrompt,
+});
+const response = await analyseAgent.run(\`Analyse the given data:
+\${data}\`);`,
+            `import { agent, multiAgent } from "llamaindex";
+import { openai } from "@llamaindex/openai";
+const analyseAgent = agent({
+  name: "AnalyseAgent",
+  llm: openai({ model: "gpt-4o" }),
+  tools: [analyseTools],
+});
+const reporterAgent = agent({
+  name: "ReporterAgent",
+  llm: openai({ model: "gpt-4o" }),
+  tools: [reporterTools],
+  canHandoffTo: [analyseAgent],
+});
+const agents = multiAgent({
+  agents: [analyseAgent, reporterAgent],
+  rootAgent: reporterAgent,
+});
+const response = await agents.run(\`Analyse the given data:
+\${data}\`);`,
           ]}
         />
       </Suspense>
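The new landing-page snippets above reference `analyseTools`, `reporterTools`, `systemPrompt`, and `data` without defining them, so they are display-only. A self-contained sketch of the same multi-agent pattern might look like the following; the two stub tools and the sample `data` string are hypothetical placeholders, not part of the commit, and `systemPrompt` is omitted.

```typescript
import { agent, multiAgent, tool } from "llamaindex";
import { openai } from "@llamaindex/openai";
import { z } from "zod";

// Hypothetical stand-in for the undefined `analyseTools` in the snippet above
const analyseTool = tool({
  name: "analyse",
  description: "Analyse raw data and return key findings",
  parameters: z.object({ data: z.string() }),
  execute: ({ data }) => `Key findings for: ${data}`,
});

// Hypothetical stand-in for the undefined `reporterTools`
const reportTool = tool({
  name: "report",
  description: "Format findings as a short report",
  parameters: z.object({ findings: z.string() }),
  execute: ({ findings }) => `Report:\n${findings}`,
});

const analyseAgent = agent({
  name: "AnalyseAgent",
  llm: openai({ model: "gpt-4o" }),
  tools: [analyseTool],
});

const reporterAgent = agent({
  name: "ReporterAgent",
  llm: openai({ model: "gpt-4o" }),
  tools: [reportTool],
  canHandoffTo: [analyseAgent],
});

// The reporter agent is the entry point and can hand analysis work off
const agents = multiAgent({
  agents: [analyseAgent, reporterAgent],
  rootAgent: reporterAgent,
});

const data = "Q3 revenue: 1.2M, Q4 revenue: 1.5M"; // sample input, not from the commit
const response = await agents.run(`Analyse the given data: ${data}`);
console.log(response);
```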
@@ -125,17 +129,17 @@ const response = await agent.chat({
         description="Truly powerful retrieval-augmented generation applications use agentic techniques, and LlamaIndex.TS makes it easy to build them."
       >
         <CodeBlock
-          code={`import { agent } from "llamaindex";
-import { OpenAI } from "@llamaindex/openai";
+          code={`import { agent, SimpleDirectoryReader, VectorStoreIndex } from "llamaindex";
+import { openai } from "@llamaindex/openai";
 
-// using a previously created LlamaIndex index to query information from
-const queryTool = index.queryTool();
+// load documents from the current directory into an index
+const reader = new SimpleDirectoryReader();
+const documents = await reader.loadData(currentDir);
+const index = await VectorStoreIndex.fromDocuments(documents);
 
 const agent = agent({
-  llm: new OpenAI({
-    model: "gpt-4o",
-  }),
-  tools: [queryTool],
+  llm: openai({ model: "gpt-4o" }),
+  tools: [index.queryTool()],
 });
 
 await agent.run('...');`}
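The updated RAG snippet leaves `currentDir` undefined and reuses the name `agent` for the variable holding the created agent, which would shadow the imported `agent` factory if run as-is. A runnable variant of the same idea, with a hypothetical data directory and the agent renamed to `ragAgent`:

```typescript
import { agent, SimpleDirectoryReader, VectorStoreIndex } from "llamaindex";
import { openai } from "@llamaindex/openai";

// Hypothetical folder of documents; the original snippet leaves currentDir undefined
const currentDir = "./data";

// Load documents from the directory and build a vector index over them
const reader = new SimpleDirectoryReader();
const documents = await reader.loadData(currentDir);
const index = await VectorStoreIndex.fromDocuments(documents);

// Renamed from `agent` to avoid shadowing the imported factory function
const ragAgent = agent({
  llm: openai({ model: "gpt-4o" }),
  tools: [index.queryTool()],
});

// Example question; the original snippet uses a placeholder prompt
const answer = await ragAgent.run("What do the documents say about llamas?");
console.log(answer);
```

The remaining hunks in this commit apply the same factory-function API to the Agent Workflow documentation page, as shown below.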
@@ -2,9 +2,6 @@
 title: Agent Workflow
 ---
 
-import { DynamicCodeBlock } from 'fumadocs-ui/components/dynamic-codeblock';
-import CodeSource from "!raw-loader!../../../../../../../examples/agentworkflow/blog-writer.ts";
-import { Tab, Tabs } from "fumadocs-ui/components/tabs";
 
 Agent Workflows are a powerful system that enables you to create and orchestrate one or multiple agents with tools to perform specific tasks. It's built on top of the base `Workflow` system and provides a streamlined interface for agent interactions.
@@ -15,11 +12,11 @@ Agent Workflows are a powerful system that enables you to create and orchestrate
 The simplest use case is creating a single agent with specific tools. Here's an example of creating an assistant that tells jokes:
 
 ```typescript
-import { agent, FunctionTool } from "llamaindex";
-import { OpenAI } from "@llamaindex/openai";
+import { agent, tool } from "llamaindex";
+import { openai } from "@llamaindex/openai";
 
 // Define a joke-telling tool
-const jokeTool = FunctionTool.from(
+const jokeTool = tool(
   () => "Baby Llama is called cria",
   {
     name: "joke",
@@ -28,15 +25,13 @@ const jokeTool = FunctionTool.from(
 );
 
 // Create a single agent workflow with the tool
-const workflow = agent({
+const jokeAgent = agent({
   tools: [jokeTool],
-  llm: new OpenAI({
-    model: "gpt-4o-mini",
-  }),
+  llm: openai({ model: "gpt-4o-mini" }),
 });
 
 // Run the workflow
-const result = await workflow.run("Tell me something funny");
+const result = await jokeAgent.run("Tell me something funny");
 console.log(result); // Baby Llama is called cria
 ```
@@ -73,8 +68,8 @@ An Agent Workflow can orchestrate multiple agents, enabling complex interactions
 Here's an example of a multi-agent system that combines joke-telling and weather information:
 
 ```typescript
-import { multiAgent, agent, FunctionTool } from "llamaindex";
-import { OpenAI } from "@llamaindex/openai";
+import { multiAgent, agent, tool } from "llamaindex";
+import { openai } from "@llamaindex/openai";
 import { z } from "zod";
 
 // Create a weather agent
@@ -82,18 +77,18 @@ const weatherAgent = agent({
   name: "WeatherAgent",
   description: "Provides weather information for any city",
   tools: [
-    FunctionTool.from(
-      ({ city }: { city: string }) => `The weather in ${city} is sunny`,
+    tool(
       {
         name: "fetchWeather",
         description: "Get weather information for a city",
         parameters: z.object({
           city: z.string(),
         }),
+        execute: ({ city }) => `The weather in ${city} is sunny`,
       }
     ),
   ],
-  llm: new OpenAI({ model: "gpt-4o-mini" }),
+  llm: openai({ model: "gpt-4o-mini" }),
 });
 
 // Create a joke-telling agent
@@ -101,18 +96,18 @@ const jokeAgent = agent({
   name: "JokeAgent",
   description: "Tells jokes and funny stories",
   tools: [jokeTool], // Using the joke tool defined earlier
-  llm: new OpenAI({ model: "gpt-4o-mini" }),
+  llm: openai({ model: "gpt-4o-mini" }),
   canHandoffTo: [weatherAgent], // Can hand off to the weather agent
 });
 
 // Create the multi-agent workflow
-const workflow = multiAgent({
+const agents = multiAgent({
   agents: [jokeAgent, weatherAgent],
   rootAgent: jokeAgent, // Start with the joke agent
 });
 
 // Run the workflow
-const result = await workflow.run(
+const result = await agents.run(
   "Give me a morning greeting with a joke and the weather in San Francisco"
 );
 ```
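Taken together, the documentation changes in this commit swap the class-based entry points for factory functions: `new OpenAI(...)` becomes `openai(...)`, `FunctionTool.from(...)` becomes `tool(...)`, and `new OpenAIAgent(...)` gives way to `agent(...)` and `multiAgent(...)`. A condensed sketch of the two styles, using only calls that appear in the diffs above; the `greet` tool and `myAgent` are hypothetical placeholders:

```typescript
// Old style (removed in this commit), shown as comments for comparison:
// import { FunctionTool } from "llamaindex";
// import { OpenAI, OpenAIAgent } from "@llamaindex/openai";
// const llm = new OpenAI({ model: "gpt-4o-mini" });
// const greetTool = FunctionTool.from(() => "Hello!", {
//   name: "greet",
//   description: "Returns a greeting",
// });
// const myAgent = new OpenAIAgent({ llm, tools: [greetTool] });

// New style (added in this commit):
import { agent, tool } from "llamaindex";
import { openai } from "@llamaindex/openai";

const greetTool = tool(() => "Hello!", {
  name: "greet",
  description: "Returns a greeting",
});

const myAgent = agent({
  llm: openai({ model: "gpt-4o-mini" }),
  tools: [greetTool],
});

const result = await myAgent.run("Say hi");
console.log(result);
```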