Really-amin committed on
Commit 62ccadb · verified · 1 Parent(s): a9a222c

Update app/api/dashboard.py

Files changed (1)
  1. app/api/dashboard.py +154 -395
app/api/dashboard.py CHANGED
@@ -1,434 +1,193 @@
- from fastapi import APIRouter, HTTPException, Depends
- from typing import List, Dict, Any, Optional
  import logging
- from datetime import datetime, timedelta
  import os
  import psutil
  from pathlib import Path

- logger = logging.getLogger(__name__)

  router = APIRouter()

- # Mock data for demonstration - replace with actual database queries
- MOCK_STATS = {
-     "total_documents": 145,
-     "processed_today": 23,
-     "active_reports": 8,
-     "uptime": "99.8%"
- }

- MOCK_ACTIVITIES = [
-     {
-         "id": 1,
-         "type": "upload",
-         "title": "فایل جدید آپلود شد",
-         "description": "document_contract_2024.pdf",
-         "timestamp": "۲ ساعت پیش"
-     },
-     {
-         "id": 2,
-         "type": "process",
-         "title": "پردازش OCR تکمیل شد",
-         "description": "استخراج متن از ۳ سند",
-         "timestamp": "۴ ساعت پیش"
-     },
-     {
-         "id": 3,
-         "type": "search",
-         "title": "جستجو انجام شد",
-         "description": "جستجو برای 'قرارداد خرید'",
-         "timestamp": "۶ ساعت پیش"
-     },
-     {
-         "id": 4,
-         "type": "export",
-         "title": "گزارش صادر شد",
-         "description": "گزارش آماری ماهانه",
-         "timestamp": "۱ روز پیش"
      }
- ]

  @router.get("/stats")
  async def get_dashboard_stats():
-     """
-     Get dashboard statistics
-     """
      try:
-         # Get actual system stats
-         stats = {}
-
-         # Try to get document count from database
-         try:
-             # This would normally query your database
-             stats["total_documents"] = MOCK_STATS["total_documents"]
-             stats["processed_today"] = MOCK_STATS["processed_today"]
-             stats["active_reports"] = MOCK_STATS["active_reports"]
-         except Exception as e:
-             logger.warning(f"Could not get database stats: {e}")
-             stats.update(MOCK_STATS)
-
-         # Get system uptime
          try:
              uptime_seconds = psutil.boot_time()
              uptime = datetime.now() - datetime.fromtimestamp(uptime_seconds)
-             stats["uptime"] = f"{uptime.days}d {uptime.seconds//3600}h"
          except Exception:
              stats["uptime"] = "N/A"
-
-         # Get storage info
-         try:
-             disk_usage = psutil.disk_usage('/')
-             stats["storage_used"] = f"{disk_usage.used // (1024**3)}GB"
-             stats["storage_total"] = f"{disk_usage.total // (1024**3)}GB"
-             stats["storage_percent"] = round((disk_usage.used / disk_usage.total) * 100, 1)
-         except Exception:
-             stats["storage_used"] = "N/A"
-             stats["storage_total"] = "N/A"
-             stats["storage_percent"] = 0
-
-         # Get memory info
-         try:
-             memory = psutil.virtual_memory()
-             stats["memory_percent"] = round(memory.percent, 1)
-             stats["memory_used"] = f"{memory.used // (1024**2)}MB"
-             stats["memory_total"] = f"{memory.total // (1024**2)}MB"
-         except Exception:
-             stats["memory_percent"] = 0
-             stats["memory_used"] = "N/A"
-             stats["memory_total"] = "N/A"
-
-         return {
-             "success": True,
-             "data": stats
-         }
-
      except Exception as e:
-         logger.error(f"Error getting dashboard stats: {e}")
-         return {
-             "success": False,
-             "error": str(e),
-             "data": MOCK_STATS
-         }

  @router.get("/recent-activity")
  async def get_recent_activity(limit: int = 10):
-     """
-     Get recent system activities
-     """
-     try:
-         # This would normally query your database for recent activities
-         activities = MOCK_ACTIVITIES[:limit]
-
-         return {
-             "success": True,
-             "activities": activities,
-             "total": len(activities)
-         }
-
-     except Exception as e:
-         logger.error(f"Error getting recent activities: {e}")
-         return {
-             "success": False,
-             "error": str(e),
-             "activities": []
-         }

  @router.get("/system-info")
  async def get_system_info():
-     """
-     Get detailed system information
-     """
-     try:
-         system_info = {}
-
-         # Basic system info
-         try:
-             system_info["platform"] = psutil.platform
-             system_info["cpu_count"] = psutil.cpu_count()
-             system_info["cpu_percent"] = psutil.cpu_percent(interval=1)
-         except Exception as e:
-             logger.warning(f"Could not get system info: {e}")
-             system_info.update({
-                 "platform": "Unknown",
-                 "cpu_count": "N/A",
-                 "cpu_percent": 0
-             })
-
-         # Application info
-         system_info["app_version"] = "2.0.0"
-         system_info["python_version"] = f"{psutil.sys.version_info.major}.{psutil.sys.version_info.minor}.{psutil.sys.version_info.micro}"
-
-         # Directory info
-         try:
-             data_dir = Path("/app/data")
-             logs_dir = Path("/app/logs")
-             cache_dir = Path("/app/cache")
-
-             system_info["directories"] = {
-                 "data": {
-                     "exists": data_dir.exists(),
-                     "size": get_directory_size(data_dir) if data_dir.exists() else 0
-                 },
-                 "logs": {
-                     "exists": logs_dir.exists(),
-                     "size": get_directory_size(logs_dir) if logs_dir.exists() else 0
-                 },
-                 "cache": {
-                     "exists": cache_dir.exists(),
-                     "size": get_directory_size(cache_dir) if cache_dir.exists() else 0
-                 }
-             }
-         except Exception as e:
-             logger.warning(f"Could not get directory info: {e}")
-             system_info["directories"] = {}
-
-         return {
-             "success": True,
-             "system_info": system_info
-         }
-
-     except Exception as e:
-         logger.error(f"Error getting system info: {e}")
-         return {
-             "success": False,
-             "error": str(e)
-         }

- @router.get("/health-check")
- async def dashboard_health_check():
-     """
-     Comprehensive health check for dashboard
-     """
      try:
-         health_status = {
-             "overall": "healthy",
-             "checks": {}
          }
-
-         # Check database
-         try:
-             # This would check database connection
-             health_status["checks"]["database"] = {
-                 "status": "healthy",
-                 "message": "Database connection OK"
-             }
-         except Exception as e:
-             health_status["checks"]["database"] = {
-                 "status": "unhealthy",
-                 "message": f"Database error: {str(e)}"
-             }
-             health_status["overall"] = "degraded"
-
-         # Check file system
-         try:
-             data_dir = Path("/app/data")
-             if not data_dir.exists():
-                 data_dir.mkdir(parents=True, exist_ok=True)
-
-             # Test write permissions
-             test_file = data_dir / "health_check.tmp"
-             test_file.write_text("test")
-             test_file.unlink()
-
-             health_status["checks"]["filesystem"] = {
-                 "status": "healthy",
-                 "message": "File system OK"
-             }
-         except Exception as e:
-             health_status["checks"]["filesystem"] = {
-                 "status": "unhealthy",
-                 "message": f"File system error: {str(e)}"
-             }
-             health_status["overall"] = "degraded"
-
-         # Check memory usage
-         try:
-             memory = psutil.virtual_memory()
-             if memory.percent > 90:
-                 health_status["checks"]["memory"] = {
-                     "status": "warning",
-                     "message": f"High memory usage: {memory.percent}%"
-                 }
-                 health_status["overall"] = "degraded"
-             else:
-                 health_status["checks"]["memory"] = {
-                     "status": "healthy",
-                     "message": f"Memory usage OK: {memory.percent}%"
-                 }
-         except Exception as e:
-             health_status["checks"]["memory"] = {
-                 "status": "unknown",
-                 "message": f"Could not check memory: {str(e)}"
-             }
-
-         # Check disk usage
-         try:
-             disk = psutil.disk_usage('/')
-             disk_percent = (disk.used / disk.total) * 100
-             if disk_percent > 90:
-                 health_status["checks"]["disk"] = {
-                     "status": "warning",
-                     "message": f"High disk usage: {disk_percent:.1f}%"
-                 }
-                 health_status["overall"] = "degraded"
-             else:
-                 health_status["checks"]["disk"] = {
-                     "status": "healthy",
-                     "message": f"Disk usage OK: {disk_percent:.1f}%"
-                 }
-         except Exception as e:
-             health_status["checks"]["disk"] = {
-                 "status": "unknown",
-                 "message": f"Could not check disk: {str(e)}"
              }
-
-         return {
-             "success": True,
-             "health": health_status,
-             "timestamp": datetime.now().isoformat()
-         }
-
      except Exception as e:
-         logger.error(f"Error in health check: {e}")
-         return {
-             "success": False,
-             "error": str(e),
-             "health": {
-                 "overall": "unhealthy",
-                 "checks": {}
-             }
-         }

  @router.get("/performance-metrics")
  async def get_performance_metrics():
-     """
-     Get system performance metrics
-     """
      try:
-         metrics = {}
-
-         # CPU metrics
-         try:
-             metrics["cpu"] = {
-                 "percent": psutil.cpu_percent(interval=1),
-                 "count": psutil.cpu_count(),
-                 "freq": psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None
-             }
-         except Exception as e:
-             logger.warning(f"Could not get CPU metrics: {e}")
-             metrics["cpu"] = {"percent": 0, "count": "N/A", "freq": None}
-
-         # Memory metrics
-         try:
-             memory = psutil.virtual_memory()
-             metrics["memory"] = {
-                 "total": memory.total,
-                 "used": memory.used,
-                 "free": memory.free,
-                 "percent": memory.percent
-             }
-         except Exception as e:
-             logger.warning(f"Could not get memory metrics: {e}")
-             metrics["memory"] = {"total": 0, "used": 0, "free": 0, "percent": 0}
-
-         # Disk metrics
-         try:
-             disk = psutil.disk_usage('/')
-             metrics["disk"] = {
-                 "total": disk.total,
-                 "used": disk.used,
-                 "free": disk.free,
-                 "percent": (disk.used / disk.total) * 100
-             }
-         except Exception as e:
-             logger.warning(f"Could not get disk metrics: {e}")
-             metrics["disk"] = {"total": 0, "used": 0, "free": 0, "percent": 0}
-
-         # Network metrics (if available)
-         try:
-             network = psutil.net_io_counters()
-             metrics["network"] = {
-                 "bytes_sent": network.bytes_sent,
-                 "bytes_recv": network.bytes_recv,
-                 "packets_sent": network.packets_sent,
-                 "packets_recv": network.packets_recv
-             }
-         except Exception as e:
-             logger.warning(f"Could not get network metrics: {e}")
-             metrics["network"] = {"bytes_sent": 0, "bytes_recv": 0, "packets_sent": 0, "packets_recv": 0}
-
-         return {
-             "success": True,
-             "metrics": metrics,
-             "timestamp": datetime.now().isoformat()
          }
-
      except Exception as e:
-         logger.error(f"Error getting performance metrics: {e}")
-         return {
-             "success": False,
-             "error": str(e)
-         }

- def get_directory_size(path: Path) -> int:
-     """Get directory size in bytes"""
-     try:
-         total_size = 0
-         for dirpath, dirnames, filenames in os.walk(path):
-             for filename in filenames:
-                 filepath = os.path.join(dirpath, filename)
-                 try:
-                     total_size += os.path.getsize(filepath)
-                 except (OSError, FileNotFoundError):
-                     pass
-         return total_size
-     except Exception:
-         return 0

- @router.post("/log-activity")
- async def log_activity(activity_data: dict):
-     """
-     Log a new activity (for future database integration)
-     """
-     try:
-         # This would normally save to database
-         logger.info(f"Activity logged: {activity_data}")
-
-         return {
-             "success": True,
-             "message": "Activity logged successfully"
-         }
-
-     except Exception as e:
-         logger.error(f"Error logging activity: {e}")
-         return {
-             "success": False,
-             "error": str(e)
-         }
-
- @router.get("/export-dashboard-data")
- async def export_dashboard_data():
-     """
-     Export dashboard data for backup or analysis
-     """
-     try:
-         export_data = {
-             "timestamp": datetime.now().isoformat(),
-             "stats": await get_dashboard_stats(),
-             "activities": await get_recent_activity(),
-             "system_info": await get_system_info(),
-             "performance": await get_performance_metrics()
-         }
-
-         return {
-             "success": True,
-             "data": export_data
-         }
-
-     except Exception as e:
-         logger.error(f"Error exporting dashboard data: {e}")
-         return {
-             "success": False,
-             "error": str(e)
-         }

+ from fastapi import APIRouter, Depends
+ from typing import Dict, Any
  import logging
+ from datetime import datetime
  import os
  import psutil
+ import sys
+ import platform
  from pathlib import Path
+ import json
+ import asyncio

+ # Redis Client
+ try:
+     import redis
+     REDIS_AVAILABLE = True
+ except ImportError:
+     REDIS_AVAILABLE = False

+ logger = logging.getLogger(__name__)
  router = APIRouter()

+ # ----------------------------
+ # Redis Connection
+ # ----------------------------
+ REDIS_HOST = os.getenv("REDIS_HOST", "localhost")
+ REDIS_PORT = int(os.getenv("REDIS_PORT", 6379))
+ REDIS_CACHE_TTL = 30  # seconds
+
+ redis_client = None
+ if REDIS_AVAILABLE:
+     try:
+         redis_client = redis.StrictRedis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
+         redis_client.ping()
+         logger.info("✅ Connected to Redis")
+     except Exception as e:
+         redis_client = None
+         logger.warning(f"⚠️ Redis unavailable: {e}")
+
+ # ----------------------------
+ # In-memory fallback cache
+ # ----------------------------
+ LOCAL_CACHE = {}
+
+ def set_cache(key: str, value: Any, ttl: int = REDIS_CACHE_TTL):
+     try:
+         if redis_client:
+             redis_client.setex(key, ttl, json.dumps(value))
+         else:
+             LOCAL_CACHE[key] = {"data": value, "expire": datetime.now().timestamp() + ttl}
+     except Exception:
+         LOCAL_CACHE[key] = {"data": value, "expire": datetime.now().timestamp() + ttl}
+
+ def get_cache(key: str):
+     try:
+         if redis_client:
+             data = redis_client.get(key)
+             return json.loads(data) if data else None
+         else:
+             cached = LOCAL_CACHE.get(key)
+             if cached and cached["expire"] > datetime.now().timestamp():
+                 return cached["data"]
+             return None
+     except Exception:
+         return None

+ # ----------------------------
+ # Simulated database query
+ # ----------------------------
+ async def query_database_stats():
+     """
+     TODO: Replace with actual DB query logic
+     """
+     await asyncio.sleep(0.1)  # simulate async query
+     return {
+         "total_documents": 200,
+         "processed_today": 35,
+         "active_reports": 10
      }
+
+ # ----------------------------
+ # Endpoints
+ # ----------------------------

  @router.get("/stats")
  async def get_dashboard_stats():
+     cache_key = "dashboard_stats"
+     cached = get_cache(cache_key)
+     if cached:
+         return {"success": True, "data": cached}
+
      try:
+         stats = await query_database_stats()
+
+         # Uptime
          try:
              uptime_seconds = psutil.boot_time()
              uptime = datetime.now() - datetime.fromtimestamp(uptime_seconds)
+             stats["uptime"] = f"{uptime.days}d {uptime.seconds // 3600}h"
          except Exception:
              stats["uptime"] = "N/A"
+
+         # Storage
+         disk_usage = psutil.disk_usage('/')
+         stats["storage_used"] = f"{disk_usage.used // (1024**3)}GB"
+         stats["storage_total"] = f"{disk_usage.total // (1024**3)}GB"
+         stats["storage_percent"] = round((disk_usage.used / disk_usage.total) * 100, 1)
+
+         # Memory
+         memory = psutil.virtual_memory()
+         stats["memory_percent"] = round(memory.percent, 1)
+         stats["memory_used"] = f"{memory.used // (1024**2)}MB"
+         stats["memory_total"] = f"{memory.total // (1024**2)}MB"
+
+         set_cache(cache_key, stats)
+         return {"success": True, "data": stats}
      except Exception as e:
+         logger.error(f"Error fetching stats: {e}")
+         return {"success": False, "error": str(e)}
+

  @router.get("/recent-activity")
  async def get_recent_activity(limit: int = 10):
+     cache_key = f"recent_activity_{limit}"
+     cached = get_cache(cache_key)
+     if cached:
+         return {"success": True, "activities": cached, "total": len(cached)}
+
+     # TODO: Replace with DB query
+     activities = [
+         {"id": 1, "type": "upload", "title": "فایل جدید آپلود شد", "description": "document_contract_2024.pdf", "timestamp": "۲ ساعت پیش"},
+         {"id": 2, "type": "process", "title": "پردازش OCR تکمیل شد", "description": "استخراج متن از ۳ سند", "timestamp": "۴ ساعت پیش"},
+     ][:limit]
+
+     set_cache(cache_key, activities)
+     return {"success": True, "activities": activities, "total": len(activities)}
+

  @router.get("/system-info")
  async def get_system_info():
+     cache_key = "system_info"
+     cached = get_cache(cache_key)
+     if cached:
+         return {"success": True, "system_info": cached}

      try:
+         info = {
+             "platform": platform.platform(),
+             "cpu_count": psutil.cpu_count(),
+             "cpu_percent": psutil.cpu_percent(interval=1),
+             "app_version": "2.1.0",
+             "python_version": f"{sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}",
          }
+
+         dirs = {}
+         for name in ["data", "logs", "cache"]:
+             path = Path(f"/app/{name}")
+             dirs[name] = {
+                 "exists": path.exists(),
+                 "size": get_directory_size(path) if path.exists() else 0
              }
+         info["directories"] = dirs
+
+         set_cache(cache_key, info)
+         return {"success": True, "system_info": info}
      except Exception as e:
+         return {"success": False, "error": str(e)}
+

  @router.get("/performance-metrics")
  async def get_performance_metrics():
      try:
+         metrics = {
+             "cpu": {"percent": psutil.cpu_percent(interval=1), "count": psutil.cpu_count()},
+             "memory": dict(psutil.virtual_memory()._asdict()),
+             "disk": dict(psutil.disk_usage('/')._asdict()),
+             "network": dict(psutil.net_io_counters()._asdict())
          }
+         return {"success": True, "metrics": metrics, "timestamp": datetime.now().isoformat()}
      except Exception as e:
+         return {"success": False, "error": str(e)}


+ def get_directory_size(path: Path) -> int:
+     total_size = 0
+     for dirpath, _, filenames in os.walk(path):
+         for filename in filenames:
+             filepath = os.path.join(dirpath, filename)
+             try:
+                 total_size += os.path.getsize(filepath)
+             except (OSError, FileNotFoundError):
+                 pass
+     return total_size
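
For reviewers who want to exercise the new Redis/LOCAL_CACHE path locally, a minimal smoke-test sketch follows. The app/api/dashboard import path is taken from this diff; the FastAPI app object, the /api/dashboard prefix, and the use of TestClient are assumptions for illustration only, not part of this commit.

# Hypothetical local smoke test (not part of this commit).
# Assumes FastAPI and its TestClient dependency (httpx) are installed;
# Redis is optional, since the module falls back to LOCAL_CACHE when unavailable.
from fastapi import FastAPI
from fastapi.testclient import TestClient

from app.api.dashboard import router  # module path as shown in this diff

app = FastAPI()
app.include_router(router, prefix="/api/dashboard")  # prefix is an assumption

client = TestClient(app)

# The first call computes stats and writes them to Redis or LOCAL_CACHE;
# a second call within REDIS_CACHE_TTL (30 s) is served from the cache.
first = client.get("/api/dashboard/stats").json()
second = client.get("/api/dashboard/stats").json()
print(first["success"], second["success"])
print(client.get("/api/dashboard/recent-activity", params={"limit": 1}).json()["total"])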