Commit 5ec1947d authored by leehuwuj

support request api

parent cbebd031
+import json
 import logging
 from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, status
+from llama_index.core.agent.workflow import AgentOutput
 from llama_index.core.llms import MessageRole
 from app.api.callbacks.llamacloud import LlamaCloudFileDownload
@@ -10,7 +12,6 @@ from app.api.routers.models import (
     ChatData,
     Message,
     Result,
-    SourceNodes,
 )
 from app.engine.engine import get_engine
 from app.engine.query_filter import generate_filters
@@ -58,27 +59,33 @@ async def chat(
         ) from e


-# TODO: Update non-streaming endpoint
-# Would be better if we use same chat.py endpoint for both agent and multiagent templates
-# # non-streaming endpoint - delete if not needed
-# @r.post("/request")
-# async def chat_request(
-#     data: ChatData,
-# ) -> Result:
-#     last_message_content = data.get_last_message_content()
-#     messages = data.get_history_messages()
+# non-streaming endpoint - delete if not needed
+@r.post("/request")
+async def chat_request(
+    data: ChatData,
+) -> Result:
+    last_message_content = data.get_last_message_content()
+    messages = data.get_history_messages()
-#     doc_ids = data.get_chat_document_ids()
-#     filters = generate_filters(doc_ids)
-#     params = data.data or {}
-#     logger.info(
-#         f"Creating chat engine with filters: {str(filters)}",
-#     )
+    doc_ids = data.get_chat_document_ids()
+    filters = generate_filters(doc_ids)
+    params = data.data or {}
+    logger.info(
+        f"Creating chat engine with filters: {str(filters)}",
+    )
+    engine = get_engine(filters=filters, params=params)
-#     chat_engine = get_chat_engine(filters=filters, params=params)
+    response = await engine.run(
+        user_msg=last_message_content,
+        chat_history=messages,
+        stream=False,
+    )
+    output = response
+    if isinstance(output, AgentOutput):
+        content = output.response.content
+    else:
+        content = json.dumps(output)
-#     response = await chat_engine.achat(last_message_content, messages)
-#     return Result(
-#         result=Message(role=MessageRole.ASSISTANT, content=response.response),
-#         nodes=SourceNodes.from_source_nodes(response.source_nodes),
-#     )
+    return Result(
+        result=Message(role=MessageRole.ASSISTANT, content=content),
+    )
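For reference, a minimal client-side sketch of exercising the new non-streaming endpoint. The mount path (/api/chat/request) and the exact ChatData payload shape are assumptions inferred from the imports above, not something this diff confirms:

# Hypothetical smoke test for the new non-streaming endpoint (a sketch,
# not part of this commit). Assumes the router is mounted under /api/chat
# and that ChatData accepts a plain "messages" list of {role, content} objects.
import httpx

payload = {
    "messages": [
        {"role": "user", "content": "Summarize the uploaded document."},
    ]
}

resp = httpx.post(
    "http://localhost:8000/api/chat/request",
    json=payload,
    timeout=60.0,
)
resp.raise_for_status()

# Result.result is a Message with role/content, per the models below.
print(resp.json()["result"]["content"])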
@@ -317,7 +317,6 @@ class SourceNodes(BaseModel):
 class Result(BaseModel):
     result: Message
-    nodes: List[SourceNodes]


 class ChatConfig(BaseModel):
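With nodes dropped from Result (and the new /request handler no longer passing source nodes), a non-streaming response now carries only the assistant message. A rough sketch of the expected body, assuming FastAPI's default Pydantic serialization and that MessageRole serializes to its lowercase string value:

# Approximate response body for POST /request after this change (assumed shape):
#
# {
#   "result": {
#     "role": "assistant",
#     "content": "..."
#   }
# }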