Why fear when every bad thing happens only to you

controller.py  +23 -18  CHANGED

@@ -1,5 +1,6 @@
 # Import necessary modules
 from concurrent.futures import ProcessPoolExecutor
+import logging
 import os
 import asyncio
 import threading

@@ -34,9 +35,13 @@ matplotlib.use('Agg')
 # Initialize FastAPI app
 app = FastAPI()
 
+# Set up logging
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
+
 # Initialize the ProcessPoolExecutor
 max_cpus = os.cpu_count()
-
+logger.info(f"Max CPUs: {max_cpus}")
 
 # Ensure the cache directory exists
 os.makedirs("/app/cache", exist_ok=True)
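The logging setup added above is the standard stdlib pattern: configure once at startup, then log through a module-level logger. For reference, a minimal standalone sketch (independent of the rest of the controller) showing both eager f-string formatting and the lazy %-style form the logging module also supports:

import logging
import os

# One-time configuration at startup, then a module-level logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

max_cpus = os.cpu_count()

# Equivalent output; the second form defers string formatting until the record is emitted.
logger.info(f"Max CPUs: {max_cpus}")
logger.info("Max CPUs: %s", max_cpus)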

@@ -93,12 +98,12 @@ async def root():
 async def basic_csv_data(request: CsvUrlRequest):
     try:
         decoded_url = unquote(request.csv_url)
-
+        logger.info(f"Fetching CSV data from URL: {decoded_url}")
         csv_data = csv_service.get_csv_basic_info(decoded_url)
-
+        logger.info(f"CSV data fetched successfully: {csv_data}")
         return {"data": csv_data}
     except Exception as e:
-
+        logger.error(f"Error while fetching CSV data: {e}")
         raise HTTPException(status_code=400, detail=f"Failed to retrieve CSV data: {str(e)}")
 
 
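The endpoint above follows the usual FastAPI shape: decode the URL, log it, delegate to the service layer, and convert any failure into an HTTPException. A self-contained sketch of that pattern; the route path and the fetch_csv_info helper are hypothetical stand-ins for the real decorator and csv_service.get_csv_basic_info:

import logging
from urllib.parse import unquote

from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
app = FastAPI()


class CsvUrlRequest(BaseModel):
    # Mirrors the request model referenced in the diff (request.csv_url).
    csv_url: str


def fetch_csv_info(url: str) -> dict:
    # Hypothetical stand-in for csv_service.get_csv_basic_info().
    return {"url": url, "columns": [], "rows": 0}


@app.post("/basic-csv-data")  # hypothetical path; the real decorator is outside the hunk
async def basic_csv_data(request: CsvUrlRequest):
    try:
        decoded_url = unquote(request.csv_url)
        logger.info(f"Fetching CSV data from URL: {decoded_url}")
        csv_data = fetch_csv_info(decoded_url)
        return {"data": csv_data}
    except Exception as e:
        logger.error(f"Error while fetching CSV data: {e}")
        raise HTTPException(status_code=400, detail=f"Failed to retrieve CSV data: {e}")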

@@ -121,7 +126,7 @@ async def get_image(request: ImageRequest, authorization: str = Header(None)):
         image_file_path = request.image_path
         return FileResponse(image_file_path, media_type="image/png")
     except Exception as e:
-
+        logger.error(f"Error: {e}")
         return {"answer": "error"}
 
 

@@ -130,11 +135,11 @@ async def get_image(request: ImageRequest, authorization: str = Header(None)):
 async def get_csv_data(request: CsvUrlRequest):
     try:
         decoded_url = unquote(request.csv_url)
-
+        logger.info(f"Fetching CSV data from URL: {decoded_url}")
         csv_data = csv_service.generate_csv_data(decoded_url)
         return csv_data
     except Exception as e:
-
+        logger.error(f"Error while fetching CSV data: {e}")
         raise HTTPException(status_code=400, detail=f"Failed to retrieve CSV data: {str(e)}")
 
 

@@ -250,7 +255,7 @@ def langchain_csv_chat(csv_url: str, question: str, chart_required: bool):
             return result.get("output")
 
         except Exception as e:
-
+            logger.error(f"Error with key index {current_key}: {str(e)}")
 
     return {"error": "All API keys exhausted"}
 

@@ -274,12 +279,12 @@ async def csv_chat(request: Dict, authorization: str = Header(None)):
             answer = await asyncio.to_thread(
                 langchain_csv_chat, decoded_url, query, False
             )
-
+            logger.info(f"langchain_answer: {answer}")
             return {"answer": jsonable_encoder(answer)}
 
         # Process with groq_chat first
         groq_answer = await asyncio.to_thread(groq_chat, decoded_url, query)
-
+        logger.info(f"groq_answer: {groq_answer}")
 
         if process_answer(groq_answer) == "Empty response received.":
             return {"answer": "Sorry, I couldn't find relevant data..."}

@@ -295,7 +300,7 @@ async def csv_chat(request: Dict, authorization: str = Header(None)):
         return {"answer": jsonable_encoder(groq_answer)}
 
     except Exception as e:
-
+        logger.error(f"Error processing request: {str(e)}")
         return {"answer": "error"}
 
 def handle_out_of_range_float(value):
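csv_chat above calls its synchronous helpers through asyncio.to_thread (Python 3.9+), which keeps the event loop responsive while the model call blocks. A minimal sketch of that pattern, with a hypothetical slow_chat helper standing in for groq_chat / langchain_csv_chat:

import asyncio
import logging
import time

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def slow_chat(query: str) -> str:
    # Hypothetical blocking helper standing in for groq_chat().
    time.sleep(1)
    return f"answer to {query!r}"


async def handler(query: str) -> dict:
    # The blocking call runs in a worker thread; the coroutine just awaits the result.
    answer = await asyncio.to_thread(slow_chat, query)
    logger.info(f"groq_answer: {answer}")
    return {"answer": answer}


if __name__ == "__main__":
    print(asyncio.run(handler("total sales per region")))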

@@ -386,7 +391,7 @@ def groq_chart(csv_url: str, question: str):
                 with current_groq_chart_lock:
                     current_groq_chart_key_index = (current_groq_chart_key_index + 1) % len(groq_api_keys)
             else:
-
+                logger.error(f"Chart generation error: {error}")
                 return {"error": error}
 
     return {"error": "All API keys exhausted for chart generation"}
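In groq_chart, a failed attempt either rotates to the next API key (under current_groq_chart_lock) or logs the error and returns it. The rotation itself is a modular index advance performed while holding a threading.Lock; a small sketch with hypothetical module-level state, assuming the keys are loaded from configuration:

import logging
import threading

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

# Hypothetical key list and index; in the controller these are module-level globals.
groq_api_keys = ["key-a", "key-b", "key-c"]
current_key_index = 0
key_lock = threading.Lock()


def rotate_key() -> str:
    """Advance to the next key and return it; safe to call from several threads."""
    global current_key_index
    with key_lock:
        current_key_index = (current_key_index + 1) % len(groq_api_keys)
        new_key = groq_api_keys[current_key_index]
    logger.info(f"Rotated to key index {current_key_index}")
    return new_key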

@@ -721,10 +726,10 @@ def langchain_csv_chart(csv_url: str, question: str, chart_required: bool):
                 return [os.path.join(image_file_path, f) for f in chart_files]
 
             if attempt < len(groq_api_keys) - 1:
-
+                logger.info(f"Langchain chart error (key {current_key}): {output}")
 
         except Exception as e:
-
+            logger.error(f"Langchain chart error (key {current_key}): {str(e)}")
 
     return "Chart generation failed after all retries"
 

@@ -755,7 +760,7 @@ async def csv_chart(request: dict, authorization: str = Header(None)):
         langchain_result = await loop.run_in_executor(
             process_executor, langchain_csv_chart, csv_url, query, True
         )
-
+        logger.info(f"Langchain chart result: {langchain_result}")
         if isinstance(langchain_result, list) and len(langchain_result) > 0:
             return FileResponse(langchain_result[0], media_type="image/png")
 
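The chart endpoint dispatches the CPU-bound generation through loop.run_in_executor and the module's process_executor, so the matplotlib work runs in a separate process rather than a thread. A sketch of that dispatch, assuming the pool is sized from max_cpus as the initialization earlier in the diff suggests, and using a hypothetical render_chart worker:

import asyncio
import os
from concurrent.futures import ProcessPoolExecutor

# Presumed sizing: one worker per available CPU, mirroring the controller's max_cpus.
process_executor = ProcessPoolExecutor(max_workers=os.cpu_count())


def render_chart(csv_url: str, query: str) -> str:
    # Hypothetical CPU-bound worker; must be a top-level function so it can be pickled.
    chart_path = "/tmp/chart.png"
    # ... build the figure and save it to chart_path ...
    return chart_path


async def chart_endpoint(csv_url: str, query: str) -> str:
    loop = asyncio.get_running_loop()
    # Off-load the blocking render to the process pool and await its result.
    return await loop.run_in_executor(process_executor, render_chart, csv_url, query)


if __name__ == "__main__":
    print(asyncio.run(chart_endpoint("http://example.com/data.csv", "plot sales by month")))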

@@ -763,7 +768,7 @@ async def csv_chart(request: dict, authorization: str = Header(None)):
         groq_result = await loop.run_in_executor(
             process_executor, groq_chart, csv_url, query
         )
-
+        logger.info(f"Groq chart result: {groq_result}")
         if isinstance(groq_result, str) and groq_result != "Chart not generated":
             return FileResponse(groq_result, media_type="image/png")
 

@@ -771,13 +776,13 @@ async def csv_chart(request: dict, authorization: str = Header(None)):
         langchain_paths = await loop.run_in_executor(
             process_executor, langchain_csv_chart, csv_url, query, True
         )
-
+        logger.info(f"Fallback langchain chart result: {langchain_paths}")
         if isinstance(langchain_paths, list) and len(langchain_paths) > 0:
             return FileResponse(langchain_paths[0], media_type="image/png")
         else:
             return {"error": "All chart generation methods failed"}
 
     except Exception as e:
-
+        logger.error(f"Critical chart error: {str(e)}")
         return {"error": "Internal system error"}
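Taken together, the chart route logs each stage of a fallback chain: it appears to try the langchain generator first, fall back to groq, and retry langchain before reporting failure. The control flow reduces to taking the first generator that yields a chart path, sketched below with hypothetical generator callables:

import logging
from typing import Callable, Optional

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def generate_with_fallback(generators: list[Callable[[], Optional[str]]]) -> Optional[str]:
    """Return the first chart path produced by any generator, or None if all fail."""
    for generate in generators:
        try:
            path = generate()
            logger.info(f"Chart result: {path}")
            if path:
                return path
        except Exception as e:
            logger.error(f"Chart generation error: {e}")
    return None

Each stage only runs when the previous one produced nothing usable, which is what the isinstance checks in the hunks above enforce.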