added gemini too
controller.py  CHANGED  +11 -2
@@ -25,6 +25,7 @@ import numpy as np
 import matplotlib.pyplot as plt
 import matplotlib
 import seaborn as sns
+from gemini_report_generator import generate_csv_report
 from intitial_q_handler import if_initial_chart_question, if_initial_chat_question
 from orchestrator_agent import csv_orchestrator_chat
 from supabase_service import upload_image_to_supabase
@@ -347,7 +348,10 @@ async def csv_chat(request: Dict, authorization: str = Header(None)):
     generate_report = request.get("generate_report")
 
     if generate_report is True:
-
+        report_files = await asyncio.to_thread(
+            generate_csv_report, decoded_url, query
+        )
+        return {"answer": jsonable_encoder(report_files)}
 
     if if_initial_chat_question(query):
         answer = await asyncio.to_thread(
@@ -847,8 +851,13 @@ async def csv_chart(request: dict, authorization: str = Header(None)):
     detailed_answer = request.get("detailed_answer", False)
     conversation_history = request.get("conversation_history", [])
     generate_report = request.get("generate_report", False)
+
     if generate_report is True:
-
+        report_files = await asyncio.to_thread(
+            generate_csv_report, csv_url, query
+        )
+        if report_files is not None:
+            return {"orchestrator_response": jsonable_encoder(report_files)}
 
     loop = asyncio.get_running_loop()
     # First, try the langchain-based method if the question qualifies
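The new `gemini_report_generator` module itself is not part of this commit, so its contents are unknown. The sketch below is a minimal guess at its shape, assuming it is a synchronous helper that loads the CSV, prompts a Gemini model, and returns a JSON-serializable dict (or None on failure). Only the `generate_csv_report(csv_url, query)` call signature comes from the call sites above; the module layout, prompt, model name, and return keys are assumptions.

```python
# gemini_report_generator.py -- hypothetical sketch, not the actual module from this commit.
# Assumptions: the google-generativeai package is installed, GEMINI_API_KEY is set,
# and the CSV is small enough to summarize inline.
import os

import pandas as pd
import google.generativeai as genai

genai.configure(api_key=os.environ["GEMINI_API_KEY"])


def generate_csv_report(csv_url: str, query: str) -> dict | None:
    """Build a report for the CSV at `csv_url`, guided by the user's `query`.

    Deliberately synchronous: controller.py offloads it with asyncio.to_thread,
    so the blocking download and LLM call never run on the event loop.
    """
    try:
        df = pd.read_csv(csv_url)
    except Exception:
        # Callers check for None and fall through to the existing chat path.
        return None

    # Keep the prompt small: send the schema and a sample, not the whole file.
    prompt = (
        "You are a data analyst. Using the CSV sample below, write a short report "
        f"answering: {query}\n\n"
        f"Columns: {list(df.columns)}\n"
        f"Sample rows:\n{df.head(20).to_csv(index=False)}"
    )

    model = genai.GenerativeModel("gemini-1.5-flash")
    response = model.generate_content(prompt)

    return {"report_markdown": response.text, "row_count": int(len(df))}
```

Wrapping the call as `await asyncio.to_thread(generate_csv_report, csv_url, query)` matches how controller.py already handles other synchronous work, keeping the FastAPI event loop responsive while the report is generated.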