From 5ec1947d4a89f6f067b26a469785145e88bfd67b Mon Sep 17 00:00:00 2001
From: leehuwuj <leehuwuj@gmail.com>
Date: Wed, 12 Feb 2025 12:51:05 +0700
Subject: [PATCH] support request api

---
 .../streaming/fastapi/app/api/routers/chat.py | 51 +++++++++++--------
 .../fastapi/app/api/routers/models.py         |  1 -
 2 files changed, 29 insertions(+), 23 deletions(-)

diff --git a/templates/types/streaming/fastapi/app/api/routers/chat.py b/templates/types/streaming/fastapi/app/api/routers/chat.py
index 8c2c7f38..8103a4b5 100644
--- a/templates/types/streaming/fastapi/app/api/routers/chat.py
+++ b/templates/types/streaming/fastapi/app/api/routers/chat.py
@@ -1,6 +1,8 @@
+import json
 import logging
 
 from fastapi import APIRouter, BackgroundTasks, HTTPException, Request, status
+from llama_index.core.agent.workflow import AgentOutput
 from llama_index.core.llms import MessageRole
 
 from app.api.callbacks.llamacloud import LlamaCloudFileDownload
@@ -10,7 +12,6 @@ from app.api.routers.models import (
     ChatData,
     Message,
     Result,
-    SourceNodes,
 )
 from app.engine.engine import get_engine
 from app.engine.query_filter import generate_filters
@@ -58,27 +59,33 @@ async def chat(
         ) from e
 
 
-# TODO: Update non-streaming endpoint
-# Would be better if we use same chat.py endpoint for both agent and multiagent templates
-# # non-streaming endpoint - delete if not needed
-# @r.post("/request")
-# async def chat_request(
-#     data: ChatData,
-# ) -> Result:
-#     last_message_content = data.get_last_message_content()
-#     messages = data.get_history_messages()
+# non-streaming endpoint - delete if not needed
+@r.post("/request")
+async def chat_request(
+    data: ChatData,
+) -> Result:
+    last_message_content = data.get_last_message_content()
+    messages = data.get_history_messages()
 
-#     doc_ids = data.get_chat_document_ids()
-#     filters = generate_filters(doc_ids)
-#     params = data.data or {}
-#     logger.info(
-#         f"Creating chat engine with filters: {str(filters)}",
-#     )
+    doc_ids = data.get_chat_document_ids()
+    filters = generate_filters(doc_ids)
+    params = data.data or {}
+    logger.info(
+        f"Creating chat engine with filters: {str(filters)}",
+    )
+    engine = get_engine(filters=filters, params=params)
 
-#     chat_engine = get_chat_engine(filters=filters, params=params)
+    response = await engine.run(
+        user_msg=last_message_content,
+        chat_history=messages,
+        stream=False,
+    )
+    output = response
+    if isinstance(output, AgentOutput):
+        content = output.response.content
+    else:
+        content = json.dumps(output)
 
-#     response = await chat_engine.achat(last_message_content, messages)
-#     return Result(
-#         result=Message(role=MessageRole.ASSISTANT, content=response.response),
-#         nodes=SourceNodes.from_source_nodes(response.source_nodes),
-#     )
+    return Result(
+        result=Message(role=MessageRole.ASSISTANT, content=content),
+    )
diff --git a/templates/types/streaming/fastapi/app/api/routers/models.py b/templates/types/streaming/fastapi/app/api/routers/models.py
index 31f2fa46..6c766a42 100644
--- a/templates/types/streaming/fastapi/app/api/routers/models.py
+++ b/templates/types/streaming/fastapi/app/api/routers/models.py
@@ -317,7 +317,6 @@ class SourceNodes(BaseModel):
 
 class Result(BaseModel):
     result: Message
-    nodes: List[SourceNodes]
 
 
 class ChatConfig(BaseModel):
--
GitLab
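
Note: the sketch below is a minimal client-side call against the new non-streaming
endpoint, not part of the patch. It assumes the router is mounted under /api/chat
(as in the streaming FastAPI template), the app listens on localhost:8000, and
ChatData accepts the same {"messages": [...]} payload as the streaming endpoint;
adjust the host, prefix, and payload to your deployment.

    # Hypothetical smoke test for the /request route added by this patch.
    import httpx

    payload = {
        "messages": [
            {"role": "user", "content": "Summarize the indexed documents."},
        ]
    }

    # POST to the non-streaming endpoint; the path prefix is an assumption.
    resp = httpx.post(
        "http://localhost:8000/api/chat/request",
        json=payload,
        timeout=60.0,
    )
    resp.raise_for_status()

    # Result now carries only the assistant message; the `nodes` field was
    # removed from the Result model in this patch.
    print(resp.json()["result"]["content"])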