@@ -89,45 +89,49 @@ async def search_handler(
89
89
return [ItemPublic .model_validate (item .to_dict ()) for item in results ]
90
90
91
91
92
@router.post("/chat", response_model=RetrievalResponse | dict)
async def chat_handler(
    context: CommonDeps,
    database_session: DBSession,
    openai_embed: EmbeddingsClient,
    openai_chat: ChatClient,
    chat_request: ChatRequest,
):
    """Answer a chat request using RAG over the Postgres database.

    Builds a `PostgresSearcher` from the request context, selects the simple
    or advanced RAG flow based on the request overrides, prepares the retrieval
    context, and returns the flow's answer.

    Returns:
        RetrievalResponse on success, or a ``{"error": <message>}`` dict if
        any step raises (the response_model is widened accordingly so the
        client always receives JSON).
    """
    try:
        searcher = PostgresSearcher(
            db_session=database_session,
            openai_embed_client=openai_embed.client,
            embed_deployment=context.openai_embed_deployment,
            embed_model=context.openai_embed_model,
            embed_dimensions=context.openai_embed_dimensions,
            embedding_column=context.embedding_column,
        )
        # Choose the flow per-request; both flows share the same interface.
        rag_flow: SimpleRAGChat | AdvancedRAGChat
        if chat_request.context.overrides.use_advanced_flow:
            rag_flow = AdvancedRAGChat(
                searcher=searcher,
                openai_chat_client=openai_chat.client,
                chat_model=context.openai_chat_model,
                chat_deployment=context.openai_chat_deployment,
            )
        else:
            rag_flow = SimpleRAGChat(
                searcher=searcher,
                openai_chat_client=openai_chat.client,
                chat_model=context.openai_chat_model,
                chat_deployment=context.openai_chat_deployment,
            )

        chat_params = rag_flow.get_params(chat_request.messages, chat_request.context.overrides)

        contextual_messages, results, thoughts = await rag_flow.prepare_context(chat_params)
        response = await rag_flow.answer(
            chat_params=chat_params, contextual_messages=contextual_messages, results=results, earlier_thoughts=thoughts
        )
        return response
    except Exception as e:
        # Log the full traceback server-side; without this the boundary
        # handler would silently swallow all diagnostics.
        import logging

        logging.getLogger(__name__).exception("chat_handler failed")
        # NOTE(review): str(e) may leak internal details (SQL, deployment
        # names) to the client — consider a generic message plus an error id.
        # Return the exception inside JSON so the client always gets a body.
        return {"error": str(e)}
131
135
132
136
133
137
@router .post ("/chat/stream" )
0 commit comments