Spaces:
Running
Running
Update main.py
Browse files
main.py
CHANGED
@@ -626,7 +626,6 @@ async def followup_agent(query: FollowupQueryModel, background_tasks: Background
|
|
626 |
return StreamingResponse(process_response(), media_type="text/event-stream")
|
627 |
|
628 |
|
629 |
-
|
630 |
@app.post("/v4/followup-agent")
|
631 |
async def followup_agent_v4(query: FollowupQueryModel, background_tasks: BackgroundTasks, api_key: str = Depends(verify_api_key)):
|
632 |
"""
|
@@ -720,6 +719,52 @@ async def followup_agent(query: FollowupQueryModel, background_tasks: Background
|
|
720 |
|
721 |
return StreamingResponse(process_response(), media_type="text/event-stream")
|
722 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
723 |
from fastapi.middleware.cors import CORSMiddleware
|
724 |
|
725 |
# CORS middleware setup
|
|
|
626 |
return StreamingResponse(process_response(), media_type="text/event-stream")
|
627 |
|
628 |
|
|
|
629 |
@app.post("/v4/followup-agent")
|
630 |
async def followup_agent_v4(query: FollowupQueryModel, background_tasks: BackgroundTasks, api_key: str = Depends(verify_api_key)):
|
631 |
"""
|
|
|
719 |
|
720 |
return StreamingResponse(process_response(), media_type="text/event-stream")
|
721 |
|
722 |
+
|
723 |
+
@app.post("/v2/digiyatra-followup")  # BUGFIX: path was "v2/digiyatra-followup" — Starlette requires routed paths to start with "/" (and every other route in this file does)
async def followup_agent(query: FollowupQueryModel, background_tasks: BackgroundTasks, api_key: str = Depends(verify_api_key)):
    """
    DigiYatra followup agent endpoint.

    Streams the LLM's answer (or clarifying questions) for the user's query as
    a text/event-stream response, then appends a final JSON frame with the
    parsed result. Conversation history is kept in the module-level
    `conversations` dict keyed by `query.conversation_id`, and the exchange is
    persisted via `update_db` as a background task.

    Requires API Key authentication via X-API-Key header.
    """
    logger.info(f"Received followup agent query: {query.query}")

    # First message of a conversation: seed history with the DigiYatra system prompt.
    if query.conversation_id not in conversations:
        conversations[query.conversation_id] = [
            {"role": "system", "content": FOLLOWUP_DIGIYATRA_PROMPT}
        ]

    conversations[query.conversation_id].append({"role": "user", "content": query.query})
    last_activity[query.conversation_id] = time.time()

    # Limit tokens in the conversation history.
    # NOTE(review): no truncation actually happens here — the full history is
    # forwarded as-is; confirm whether a token limiter was intended.
    limited_conversation = conversations[query.conversation_id]

    def process_response():
        # Stream raw chunks to the client while accumulating the full text
        # so it can be parsed and persisted once the stream ends.
        full_response = ""
        for content in chat_with_llama_stream(limited_conversation, model=query.model_id):
            full_response += content
            yield json.dumps({"content": content, "type": "response"})

        logger.info(f"LLM RAW response for query: {query.query}: {full_response}")
        # `tools` is parsed but unused by this endpoint.
        response_content, interact, tools = parse_followup_and_tools(full_response)

        result = {
            "response": response_content,
            "clarification": interact
        }

        # Final structured frame, separated from the streamed chunks by a blank line.
        yield "\n\n" + json.dumps(result)

        # Add the assistant's response to the conversation history.
        conversations[query.conversation_id].append({"role": "assistant", "content": full_response})

        background_tasks.add_task(update_db, query.user_id, query.conversation_id, query.query, full_response)
        logger.info(f"Completed followup agent response for query: {query.query}, send result: {result}")

    return StreamingResponse(process_response(), media_type="text/event-stream")
|
766 |
+
|
767 |
+
|
768 |
from fastapi.middleware.cors import CORSMiddleware
|
769 |
|
770 |
# CORS middleware setup
|