import os
from dotenv import load_dotenv
import uuid
import matplotlib.pyplot as plt
from pathlib import Path
from typing import Dict, Any, List, Literal, Optional
import pandas as pd
import numpy as np
import json
import io
import contextlib
import traceback
import time
from datetime import datetime, timedelta
import seaborn as sns
import scipy.stats as stats
from pydantic import BaseModel
from supabase_service import upload_file_to_supabase
# Load environment variables from .env file
load_dotenv()
class CodeResponse(BaseModel):
"""Container for code-related responses"""
language: str = "python"
code: str
class ChartSpecification(BaseModel):
"""Details about requested charts"""
image_description: str
code: Optional[str] = None
class AnalysisOperation(BaseModel):
"""Container for a single analysis operation with its code and result"""
code: CodeResponse
result_var: str
class CsvChatResult(BaseModel):
"""Structured response for CSV-related AI interactions"""
response_type: Literal["casual", "data_analysis", "visualization", "mixed"]
casual_response: str
analysis_operations: List[AnalysisOperation]
charts: Optional[List[ChartSpecification]] = None
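# Illustrative example only (hand-written, not produced at runtime): a parsed CsvChatResult
# from the LLM layer might look like the following; all values are hypothetical and simply
# show the expected shape.
#
# CsvChatResult(
#     response_type="data_analysis",
#     casual_response="Here is a quick summary of your data.",
#     analysis_operations=[
#         AnalysisOperation(
#             code=CodeResponse(code="summary = df.describe()"),
#             result_var="summary",
#         )
#     ],
#     charts=[ChartSpecification(image_description="Histogram of the sales column")],
# )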
class PythonExecutor:
"""Handles execution of Python code with comprehensive data analysis libraries"""
def __init__(self, df: pd.DataFrame, charts_folder: str = "generated_charts"):
"""
Initialize the PythonExecutor with a DataFrame
Args:
df (pd.DataFrame): The DataFrame to operate on
charts_folder (str): Folder to save charts in
"""
self.df = df
self.charts_folder = Path(charts_folder)
self.charts_folder.mkdir(exist_ok=True)
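        # Note: exec_locals persists for the lifetime of this executor, so variables
        # defined by one execute_code() call remain visible to later calls
        # (process_response relies on this to look up each operation's result_var).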
self.exec_locals = {}
def execute_code(self, code: str) -> Dict[str, Any]:
"""
Execute Python code with full data analysis context and return results
Args:
code (str): Python code to execute
Returns:
            dict: Dictionary with keys 'output' (captured stdout), 'error' (message and
                traceback, or None), 'plots' (list of PNG images as bytes), and
                'locals' (the persistent execution namespace)
"""
output = ""
error = None
plots = []
# Capture stdout
stdout = io.StringIO()
# Monkey patch plt.show() to save figures
original_show = plt.show
        def custom_show(*args, **kwargs):
            """Custom show function that saves plots to in-memory buffers instead of displaying them"""
            for fig_num in plt.get_fignums():
                figure = plt.figure(fig_num)
# Save plot to bytes buffer
buf = io.BytesIO()
figure.savefig(buf, format='png', bbox_inches='tight')
buf.seek(0)
plots.append(buf.read())
plt.close('all')
try:
# Create comprehensive execution context with data analysis libraries
exec_globals = {
# Core data analysis
'pd': pd,
'np': np,
'df': self.df,
# Visualization
'plt': plt,
'sns': sns,
# Statistics
'stats': stats,
# Date/time
'datetime': datetime,
'timedelta': timedelta,
'time': time,
# Utilities
'json': json,
'__builtins__': __builtins__,
}
# Replace plt.show with custom implementation
plt.show = custom_show
# Execute code and capture output
with contextlib.redirect_stdout(stdout):
exec(code, exec_globals, self.exec_locals)
output = stdout.getvalue()
except Exception as e:
error = {
"message": str(e),
"traceback": traceback.format_exc()
}
finally:
# Restore original plt.show
plt.show = original_show
return {
'output': output,
'error': error,
'plots': plots,
'locals': self.exec_locals
}
async def save_plot_to_supabase(self, plot_data: bytes, description: str, chat_id: str) -> str:
"""
Save plot to Supabase storage and return the public URL
Args:
plot_data (bytes): Image data in bytes
description (str): Description of the plot
chat_id (str): ID of the chat session
Returns:
str: Public URL of the uploaded chart
"""
# Generate unique filename
filename = f"chart_{uuid.uuid4().hex}.png"
filepath = self.charts_folder / filename
# Save the plot locally first
with open(filepath, 'wb') as f:
f.write(plot_data)
try:
# Upload to Supabase
public_url = await upload_file_to_supabase(
file_path=str(filepath),
file_name=filename,
chat_id=chat_id
)
# Remove the local file after upload
os.remove(filepath)
return public_url
except Exception as e:
# Clean up local file if upload fails
if os.path.exists(filepath):
os.remove(filepath)
raise Exception(f"Failed to upload plot to Supabase: {e}")
def _format_result(self, result: Any) -> str:
"""Format the result for display"""
if isinstance(result, (pd.DataFrame, pd.Series)):
            # Serialize via to_json (handles NumPy and datetime types), then re-parse for pretty printing
            json_str = result.to_json(orient='records', date_format='iso')
            return json.dumps(json.loads(json_str), indent=2)
elif isinstance(result, (dict, list)):
return json.dumps(result, indent=2)
return str(result)
async def process_response(self, response: CsvChatResult, chat_id: str) -> str:
"""Process the response with proper variable handling"""
output_parts = [response.casual_response]
# Process analysis operations first
for operation in response.analysis_operations:
execution_result = self.execute_code(operation.code.code)
# Get the result from locals
result = self.exec_locals.get(operation.result_var)
if execution_result['error']:
output_parts.append(f"\nError in operation '{operation.result_var}':")
output_parts.append("```python\n" + execution_result['error']['message'] + "\n```")
elif result is not None:
output_parts.append(f"\nResult for '{operation.result_var}':")
output_parts.append("```python\n" + self._format_result(result) + "\n```")
else:
output_parts.append(f"\nOperation '{operation.result_var}' output:")
if execution_result['output'].strip():
output_parts.append("```\n" + execution_result['output'].strip() + "\n```")
else:
output_parts.append("No output or variable found")
# Process charts after all operations
if response.charts:
output_parts.append("\n## Visualizations")
for chart in response.charts:
if chart.code:
chart_result = self.execute_code(chart.code)
if chart_result['plots']:
for plot_data in chart_result['plots']:
try:
public_url = await self.save_plot_to_supabase(
plot_data=plot_data,
description=chart.image_description,
chat_id=chat_id
)
output_parts.append(f"\n### {chart.image_description}")
output_parts.append(f"")
except Exception as e:
output_parts.append(f"\nError uploading chart: {str(e)}")
elif chart_result['error']:
output_parts.append("```python\n" + f"Error generating {chart.image_description}: {chart_result['error']['message']}" + "\n```")
return "\n".join(output_parts) |