File tree Expand file tree Collapse file tree 1 file changed +10
-8
lines changed
Expand file tree Collapse file tree 1 file changed +10
-8
lines changed Original file line number Diff line number Diff line change @@ -105,25 +105,27 @@ async def start_chat(session: SessionDep, current_user: CurrentUser):
@router.post("/recommend_questions/{chat_record_id}")
async def recommend_questions(session: SessionDep, current_user: CurrentUser, chat_record_id: int,
                              current_assistant: CurrentAssistant):
    """Stream recommended follow-up questions for a chat record as SSE.

    Always answers with a ``text/event-stream`` response so the client can
    consume a single protocol regardless of outcome:
      * record not found  -> one event with an empty ``content`` list
      * internal failure  -> one event of type ``error`` carrying the message
      * success           -> the LLM service's streamed recommendation result
    """

    def _empty_stream():
        # Single SSE event announcing "no recommendations" for a missing record.
        payload = orjson.dumps({'content': [], 'type': 'recommended_question'}).decode()
        yield 'data:' + payload + '\n\n'

    try:
        record = get_chat_record_by_id(session, chat_record_id)
        if not record:
            # Missing record is not treated as an HTTP error; emit an empty event.
            return StreamingResponse(_empty_stream(), media_type="text/event-stream")

        question_text = record.question if record.question else ''
        request_question = ChatQuestion(chat_id=record.chat_id, question=question_text)

        llm_service = await LLMService.create(current_user, request_question, current_assistant, True)
        llm_service.set_record(record)
        llm_service.run_recommend_questions_task_async()
    except Exception as e:
        # Boundary handler: log the traceback, then surface the failure to the
        # client as an in-stream error event instead of an HTTP error status.
        traceback.print_exc()

        def _error_stream(exc: Exception):
            payload = orjson.dumps({'content': str(exc), 'type': 'error'}).decode()
            yield 'data:' + payload + '\n\n'

        return StreamingResponse(_error_stream(e), media_type="text/event-stream")

    return StreamingResponse(llm_service.await_result(), media_type="text/event-stream")
You can’t perform that action at this time.
0 commit comments