Unverified commit 5263bde8, authored by Marcus Schiesser, committed by GitHub

feat: Use selected LlamaCloud index in multi-agent template (#350)

parent 4dee65b9
---
"create-llama": patch
---
Use selected LlamaCloud index in multi-agent template
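In short, the index selection made in the frontend now travels from the chat request down to the researcher's query engine tool. Below is a minimal, self-contained sketch of that flow in plain Python; the function names mirror the diff that follows, while the IndexConfig stand-in, its fields, and the payload shape are assumptions for illustration, not the template's actual schema.

# Minimal sketch of the parameter flow this commit introduces (runs standalone,
# independent of the template). Names mirror the diff below; IndexConfig's
# fields and the payload shape are illustrative assumptions.
from typing import Any, Dict, List, Optional


class IndexConfig:
    """Stand-in for the template's IndexConfig holding the selected index params."""

    def __init__(self, **params: Any) -> None:
        self.params = params


def get_index(config: Optional[IndexConfig] = None) -> Dict[str, Any]:
    # In the template this would return the selected LlamaCloud index;
    # here it only echoes the configuration.
    return {"selected_index": config.params if config else {}}


def create_researcher(chat_history: List[Any], **kwargs: Any) -> Dict[str, Any]:
    # The researcher builds its query engine tool from the forwarded params.
    index_config = IndexConfig(**(kwargs.get("params") or {}))
    return get_index(index_config)


def get_chat_engine(chat_history: List[Any], **kwargs: Any) -> Dict[str, Any]:
    # The chat route now calls this with params=data.data or {}; every agent
    # factory (choreography, orchestrator, workflow) passes **kwargs down.
    return create_researcher(chat_history, **kwargs)


# Hypothetical payload coming from the frontend's LlamaCloud selector:
params = {"llama_cloud_pipeline": {"pipeline": "my-pipeline", "project": "Default"}}
print(get_chat_engine(chat_history=[], params=params))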
@@ -28,8 +28,8 @@ async def chat(
         # but agent workflow does not support them yet
         # ignore chat params and use all documents for now
         # TODO: generate filters based on doc_ids
-        # TODO: use chat params
-        engine = get_chat_engine(chat_history=messages)
+        params = data.data or {}
+        engine = get_chat_engine(chat_history=messages, params=params)
         event_handler = engine.run(input=last_message_content, streaming=True)
         return VercelStreamResponse(
@@ -18,11 +18,11 @@ def get_chat_engine(
     agent_type = os.getenv("EXAMPLE_TYPE", "").lower()
     match agent_type:
         case "choreography":
-            agent = create_choreography(chat_history)
+            agent = create_choreography(chat_history, **kwargs)
         case "orchestrator":
-            agent = create_orchestrator(chat_history)
+            agent = create_orchestrator(chat_history, **kwargs)
         case _:
-            agent = create_workflow(chat_history)
+            agent = create_workflow(chat_history, **kwargs)
     logger.info(f"Using agent pattern: {agent_type}")
@@ -8,8 +8,8 @@ from app.examples.researcher import create_researcher
 from llama_index.core.chat_engine.types import ChatMessage


-def create_choreography(chat_history: Optional[List[ChatMessage]] = None):
-    researcher = create_researcher(chat_history)
+def create_choreography(chat_history: Optional[List[ChatMessage]] = None, **kwargs):
+    researcher = create_researcher(chat_history, **kwargs)
     publisher = create_publisher(chat_history)
     reviewer = FunctionCallingAgent(
         name="reviewer",
@@ -21,12 +21,14 @@ def create_choreography(chat_history: Optional[List[ChatMessage]] = None):
         name="writer",
         agents=[researcher, reviewer, publisher],
         description="expert in writing blog posts, needs researched information and images to write a blog post",
-        system_prompt=dedent("""
+        system_prompt=dedent(
+            """
             You are an expert in writing blog posts. You are given a task to write a blog post. Before starting to write the post, consult the researcher agent to get the information you need. Don't make up any information yourself.
             After creating a draft for the post, send it to the reviewer agent to receive feedback and make sure to incorporate the feedback from the reviewer.
             You can consult the reviewer and researcher a maximum of two times. Your output should contain only the blog post.
             Finally, always request the publisher to create a document (PDF, HTML) and publish the blog post.
-        """),
+            """
+        ),
         # TODO: add chat_history support to AgentCallingAgent
         # chat_history=chat_history,
     )
@@ -8,28 +8,32 @@ from app.examples.researcher import create_researcher
 from llama_index.core.chat_engine.types import ChatMessage


-def create_orchestrator(chat_history: Optional[List[ChatMessage]] = None):
-    researcher = create_researcher(chat_history)
+def create_orchestrator(chat_history: Optional[List[ChatMessage]] = None, **kwargs):
+    researcher = create_researcher(chat_history, **kwargs)
     writer = FunctionCallingAgent(
         name="writer",
         description="expert in writing blog posts, need information and images to write a post",
-        system_prompt=dedent("""
+        system_prompt=dedent(
+            """
             You are an expert in writing blog posts.
             You are given a task to write a blog post. Do not make up any information yourself.
             If you don't have the necessary information to write a blog post, reply "I need information about the topic to write the blog post".
             If you need to use images, reply "I need images about the topic to write the blog post". Do not use any dummy images made up by you.
             If you have all the information needed, write the blog post.
-        """),
+            """
+        ),
         chat_history=chat_history,
     )
     reviewer = FunctionCallingAgent(
         name="reviewer",
         description="expert in reviewing blog posts, needs a written blog post to review",
-        system_prompt=dedent("""
+        system_prompt=dedent(
+            """
             You are an expert in reviewing blog posts. You are given a task to review a blog post. Review the post and fix any issues found yourself. You must output a final blog post.
             A post must include at least one valid image. If not, reply "I need images about the topic to write the blog post". An image URL starting with "example" or "your website" is not valid.
             Especially check for logical inconsistencies and proofread the post for grammar and spelling errors.
-        """),
+            """
+        ),
         chat_history=chat_history,
     )
     publisher = create_publisher(chat_history)
@@ -3,17 +3,19 @@ from textwrap import dedent
 from typing import List

 from app.agents.single import FunctionCallingAgent
-from app.engine.index import get_index
+from app.engine.index import IndexConfig, get_index
 from app.engine.tools import ToolFactory
 from llama_index.core.chat_engine.types import ChatMessage
 from llama_index.core.tools import QueryEngineTool, ToolMetadata


-def _create_query_engine_tool() -> QueryEngineTool:
+def _create_query_engine_tool(params=None) -> QueryEngineTool:
     """
     Provide an agent worker that can be used to query the index.
     """
-    index = get_index()
+    # Add query tool if index exists
+    index_config = IndexConfig(**(params or {}))
+    index = get_index(index_config)
     if index is None:
         return None
     top_k = int(os.getenv("TOP_K", 0))
@@ -31,13 +33,13 @@ def _create_query_engine_tool() -> QueryEngineTool:
     )


-def _get_research_tools() -> QueryEngineTool:
+def _get_research_tools(**kwargs) -> QueryEngineTool:
     """
     Researcher take responsibility for retrieving information.
     Try init wikipedia or duckduckgo tool if available.
     """
     tools = []
-    query_engine_tool = _create_query_engine_tool()
+    query_engine_tool = _create_query_engine_tool(**kwargs)
     if query_engine_tool is not None:
         tools.append(query_engine_tool)
     researcher_tool_names = ["duckduckgo", "wikipedia.WikipediaToolSpec"]
@@ -48,16 +50,17 @@ def _get_research_tools() -> QueryEngineTool:
     return tools


-def create_researcher(chat_history: List[ChatMessage]):
+def create_researcher(chat_history: List[ChatMessage], **kwargs):
     """
     Researcher is an agent that take responsibility for using tools to complete a given task.
     """
-    tools = _get_research_tools()
+    tools = _get_research_tools(**kwargs)
     return FunctionCallingAgent(
         name="researcher",
         tools=tools,
         description="expert in retrieving any unknown content or searching for images from the internet",
-        system_prompt=dedent("""
+        system_prompt=dedent(
+            """
             You are a researcher agent. You are given a research task.
             If the conversation already includes the information and there is no new request for additional information from the user, you should return the appropriate content to the writer.
@@ -77,6 +80,7 @@ def create_researcher(chat_history: List[ChatMessage]):
             If you use the tools but don't find any related information, please return "I didn't find any new information for {the topic}." along with the content you found. Don't try to make up information yourself.
             If the request doesn't need any new information because it was in the conversation history, please return "The task doesn't need any new information. Please reuse the existing content in the conversation history."
-        """),
+            """
+        ),
         chat_history=chat_history,
     )
@@ -17,9 +17,10 @@ from llama_index.core.workflow import (
 )


-def create_workflow(chat_history: Optional[List[ChatMessage]] = None):
+def create_workflow(chat_history: Optional[List[ChatMessage]] = None, **kwargs):
     researcher = create_researcher(
         chat_history=chat_history,
+        **kwargs,
     )
     publisher = create_publisher(
         chat_history=chat_history,
@@ -127,7 +128,8 @@ class BlogPostWorkflow(Workflow):
         self, input: str, chat_history: List[ChatMessage]
     ) -> str:
         prompt_template = PromptTemplate(
-            dedent("""
+            dedent(
+                """
             You are an expert in decision-making, helping people write and publish blog posts.
             If the user is asking for a file or to publish content, respond with 'publish'.
             If the user requests to write or update a blog post, respond with 'not_publish'.
@@ -140,7 +142,8 @@ class BlogPostWorkflow(Workflow):
             Given the chat history and the new user request, decide whether to publish based on existing information.

             Decision (respond with either 'not_publish' or 'publish'):
-        """)
+            """
+            )
         )
         chat_history_str = "\n".join(
@@ -23,7 +23,7 @@ async def chat_config() -> ChatConfig:
 try:
     from app.engine.service import LLamaCloudFileService

-    logger.info("LlamaCloud is configured. Adding /config/llamacloud route.")
+    print("LlamaCloud is configured. Adding /config/llamacloud route.")

     @r.get("/llamacloud")
     async def chat_llama_cloud_config():
@@ -42,7 +42,5 @@ try:
         }

 except ImportError:
-    logger.debug(
-        "LlamaCloud is not configured. Skipping adding /config/llamacloud route."
-    )
+    print("LlamaCloud is not configured. Skipping adding /config/llamacloud route.")
     pass
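For completeness, a request that exercises the new path could look like the sketch below. The /api/chat route is the template's chat endpoint, while the exact shape of the "data" field depends on the frontend's LlamaCloud selector and is assumed here for illustration.

# Hypothetical client call: the "data" payload arrives in the route as data.data
# and is forwarded as params to get_chat_engine. Field names are assumptions.
import requests

response = requests.post(
    "http://localhost:8000/api/chat",
    json={
        "messages": [{"role": "user", "content": "Write a blog post about LlamaIndex."}],
        "data": {"llama_cloud_pipeline": {"pipeline": "my-pipeline", "project": "Default"}},
    },
)
print(response.text)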